var/home/core/zuul-output/logs/kubelet.log
Dec 11 15:23:08 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 11 15:23:08 crc restorecon[4688]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc 
restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 15:23:08 crc 
restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc 
restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc 
restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 
crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 
15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 
15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 15:23:08 crc 
restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:08 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc 
restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 15:23:09 crc restorecon[4688]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 11 15:23:09 crc kubenswrapper[4723]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 15:23:09 crc kubenswrapper[4723]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 11 15:23:09 crc kubenswrapper[4723]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 15:23:09 crc kubenswrapper[4723]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
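The deprecation warnings above (and the two that follow below) all point to the same remedy: move the flag values into the file passed via the kubelet's --config flag. A minimal sketch of the corresponding KubeletConfiguration keys is given here; the endpoint, plugin directory, taint, and reservation values are placeholders, not values taken from this node, and the snippet prints JSON only to keep it dependency-free, while the real config file is normally written as YAML with the same keys.

# Sketch only: maps the deprecated flags from the warnings above onto
# KubeletConfiguration fields. All values below are placeholders.
import json

kubelet_config = {
    "apiVersion": "kubelet.config.k8s.io/v1beta1",
    "kind": "KubeletConfiguration",
    # replaces --container-runtime-endpoint
    "containerRuntimeEndpoint": "unix:///var/run/crio/crio.sock",
    # replaces --volume-plugin-dir
    "volumePluginDir": "/etc/kubernetes/kubelet-plugins/volume/exec",
    # replaces --register-with-taints
    "registerWithTaints": [
        {"key": "node-role.kubernetes.io/master", "effect": "NoSchedule"}
    ],
    # replaces --system-reserved
    "systemReserved": {"cpu": "500m", "memory": "1Gi"},
}

print(json.dumps(kubelet_config, indent=2))

Setting these keys in the --config file and dropping the equivalent command-line flags should silence the corresponding deprecation warnings at the next kubelet start.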
Dec 11 15:23:09 crc kubenswrapper[4723]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 11 15:23:09 crc kubenswrapper[4723]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.380889 4723 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384579 4723 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384607 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384613 4723 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384620 4723 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384626 4723 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384633 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384639 4723 feature_gate.go:330] unrecognized feature gate: Example Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384644 4723 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384650 4723 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384657 4723 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384662 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384668 4723 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384673 4723 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384679 4723 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384684 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384699 4723 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384705 4723 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384711 4723 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384717 4723 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384723 4723 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384728 4723 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 15:23:09 crc 
kubenswrapper[4723]: W1211 15:23:09.384733 4723 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384738 4723 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384743 4723 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384749 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384754 4723 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384759 4723 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384764 4723 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384769 4723 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384775 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384781 4723 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384786 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384791 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384798 4723 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384805 4723 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384815 4723 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384823 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384832 4723 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384840 4723 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384848 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384856 4723 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384864 4723 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384870 4723 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384877 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384883 4723 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384890 4723 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384896 4723 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384902 4723 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384909 4723 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384916 4723 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384923 4723 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384932 4723 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384937 4723 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384943 4723 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384948 4723 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384953 4723 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384958 4723 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384987 4723 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.384995 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385001 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385008 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385014 4723 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385020 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385025 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385030 4723 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385035 4723 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385040 4723 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385046 4723 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385051 4723 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385208 4723 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.385215 4723 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385497 4723 flags.go:64] FLAG: --address="0.0.0.0" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385516 4723 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385526 4723 flags.go:64] FLAG: --anonymous-auth="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385534 4723 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385544 4723 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385550 4723 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385558 4723 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385566 4723 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385573 4723 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385579 4723 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385585 4723 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385592 4723 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385598 4723 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385604 4723 flags.go:64] FLAG: 
--cgroup-root="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385610 4723 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385616 4723 flags.go:64] FLAG: --client-ca-file="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385624 4723 flags.go:64] FLAG: --cloud-config="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385630 4723 flags.go:64] FLAG: --cloud-provider="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385636 4723 flags.go:64] FLAG: --cluster-dns="[]" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385650 4723 flags.go:64] FLAG: --cluster-domain="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385656 4723 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385662 4723 flags.go:64] FLAG: --config-dir="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385668 4723 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385675 4723 flags.go:64] FLAG: --container-log-max-files="5" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385682 4723 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385689 4723 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385695 4723 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385702 4723 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385708 4723 flags.go:64] FLAG: --contention-profiling="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385714 4723 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385720 4723 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385726 4723 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385732 4723 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385741 4723 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385747 4723 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385753 4723 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385760 4723 flags.go:64] FLAG: --enable-load-reader="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385766 4723 flags.go:64] FLAG: --enable-server="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385773 4723 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385781 4723 flags.go:64] FLAG: --event-burst="100" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385787 4723 flags.go:64] FLAG: --event-qps="50" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385793 4723 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385800 4723 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385806 4723 flags.go:64] FLAG: --eviction-hard="" Dec 11 15:23:09 
crc kubenswrapper[4723]: I1211 15:23:09.385813 4723 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385819 4723 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385825 4723 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385832 4723 flags.go:64] FLAG: --eviction-soft="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385838 4723 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385844 4723 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385850 4723 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385856 4723 flags.go:64] FLAG: --experimental-mounter-path="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385863 4723 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385869 4723 flags.go:64] FLAG: --fail-swap-on="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385875 4723 flags.go:64] FLAG: --feature-gates="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385883 4723 flags.go:64] FLAG: --file-check-frequency="20s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385890 4723 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385898 4723 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385906 4723 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385915 4723 flags.go:64] FLAG: --healthz-port="10248" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385923 4723 flags.go:64] FLAG: --help="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385930 4723 flags.go:64] FLAG: --hostname-override="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385937 4723 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385945 4723 flags.go:64] FLAG: --http-check-frequency="20s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385953 4723 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385960 4723 flags.go:64] FLAG: --image-credential-provider-config="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.385994 4723 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386003 4723 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386013 4723 flags.go:64] FLAG: --image-service-endpoint="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386021 4723 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386028 4723 flags.go:64] FLAG: --kube-api-burst="100" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386036 4723 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386046 4723 flags.go:64] FLAG: --kube-api-qps="50" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386061 4723 flags.go:64] FLAG: --kube-reserved="" Dec 11 15:23:09 crc 
kubenswrapper[4723]: I1211 15:23:09.386070 4723 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386078 4723 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386086 4723 flags.go:64] FLAG: --kubelet-cgroups="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386093 4723 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386101 4723 flags.go:64] FLAG: --lock-file="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386108 4723 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386118 4723 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386126 4723 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386138 4723 flags.go:64] FLAG: --log-json-split-stream="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386145 4723 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386153 4723 flags.go:64] FLAG: --log-text-split-stream="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386160 4723 flags.go:64] FLAG: --logging-format="text" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386168 4723 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386177 4723 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386186 4723 flags.go:64] FLAG: --manifest-url="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386193 4723 flags.go:64] FLAG: --manifest-url-header="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386214 4723 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386224 4723 flags.go:64] FLAG: --max-open-files="1000000" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386234 4723 flags.go:64] FLAG: --max-pods="110" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386242 4723 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386250 4723 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386258 4723 flags.go:64] FLAG: --memory-manager-policy="None" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386266 4723 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386275 4723 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386282 4723 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386291 4723 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386310 4723 flags.go:64] FLAG: --node-status-max-images="50" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386317 4723 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386325 4723 flags.go:64] FLAG: --oom-score-adj="-999" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386332 
4723 flags.go:64] FLAG: --pod-cidr="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386339 4723 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386350 4723 flags.go:64] FLAG: --pod-manifest-path="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386357 4723 flags.go:64] FLAG: --pod-max-pids="-1" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386365 4723 flags.go:64] FLAG: --pods-per-core="0" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386372 4723 flags.go:64] FLAG: --port="10250" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386380 4723 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386388 4723 flags.go:64] FLAG: --provider-id="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386396 4723 flags.go:64] FLAG: --qos-reserved="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386404 4723 flags.go:64] FLAG: --read-only-port="10255" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386412 4723 flags.go:64] FLAG: --register-node="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386420 4723 flags.go:64] FLAG: --register-schedulable="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386428 4723 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386441 4723 flags.go:64] FLAG: --registry-burst="10" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386449 4723 flags.go:64] FLAG: --registry-qps="5" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386457 4723 flags.go:64] FLAG: --reserved-cpus="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386464 4723 flags.go:64] FLAG: --reserved-memory="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386474 4723 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386482 4723 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386490 4723 flags.go:64] FLAG: --rotate-certificates="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386496 4723 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386505 4723 flags.go:64] FLAG: --runonce="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386511 4723 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386518 4723 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386524 4723 flags.go:64] FLAG: --seccomp-default="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386530 4723 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386536 4723 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386543 4723 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386550 4723 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386557 4723 flags.go:64] FLAG: --storage-driver-password="root" Dec 11 15:23:09 crc kubenswrapper[4723]: 
I1211 15:23:09.386563 4723 flags.go:64] FLAG: --storage-driver-secure="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386569 4723 flags.go:64] FLAG: --storage-driver-table="stats" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386575 4723 flags.go:64] FLAG: --storage-driver-user="root" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386582 4723 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386589 4723 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386595 4723 flags.go:64] FLAG: --system-cgroups="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386601 4723 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386611 4723 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386617 4723 flags.go:64] FLAG: --tls-cert-file="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386623 4723 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386638 4723 flags.go:64] FLAG: --tls-min-version="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386644 4723 flags.go:64] FLAG: --tls-private-key-file="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386650 4723 flags.go:64] FLAG: --topology-manager-policy="none" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386656 4723 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386664 4723 flags.go:64] FLAG: --topology-manager-scope="container" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386670 4723 flags.go:64] FLAG: --v="2" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386678 4723 flags.go:64] FLAG: --version="false" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386687 4723 flags.go:64] FLAG: --vmodule="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386694 4723 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.386701 4723 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386847 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386854 4723 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386859 4723 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386865 4723 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386871 4723 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386877 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386882 4723 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386888 4723 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386894 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 15:23:09 
crc kubenswrapper[4723]: W1211 15:23:09.386899 4723 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386905 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386910 4723 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386915 4723 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386920 4723 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386925 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386931 4723 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386936 4723 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386941 4723 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386946 4723 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386952 4723 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386957 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386963 4723 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.386997 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387004 4723 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387009 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387015 4723 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387021 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387027 4723 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387032 4723 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387037 4723 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387042 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387048 4723 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387053 4723 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387058 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387064 4723 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 15:23:09 
crc kubenswrapper[4723]: W1211 15:23:09.387069 4723 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387075 4723 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387080 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387086 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387091 4723 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387096 4723 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387104 4723 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387111 4723 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387117 4723 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387123 4723 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387129 4723 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387134 4723 feature_gate.go:330] unrecognized feature gate: Example Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387139 4723 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387147 4723 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387156 4723 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387306 4723 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387322 4723 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387330 4723 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387337 4723 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387346 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387352 4723 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387359 4723 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387365 4723 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387370 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387419 4723 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387425 4723 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387434 4723 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387440 4723 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387444 4723 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387448 4723 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387452 4723 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387456 4723 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387465 4723 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387471 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387475 4723 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.387482 4723 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.387511 4723 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.399955 4723 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.400024 4723 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400140 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400156 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400165 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400173 4723 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400180 4723 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400186 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400191 4723 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400197 4723 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400202 4723 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400208 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400213 4723 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400218 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400223 4723 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400229 4723 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400234 4723 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400239 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400244 4723 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400249 4723 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400255 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400260 4723 feature_gate.go:330] 
unrecognized feature gate: HardwareSpeed Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400266 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400271 4723 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400276 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400282 4723 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400287 4723 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400293 4723 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400298 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400303 4723 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400309 4723 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400314 4723 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400319 4723 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400327 4723 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400332 4723 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400340 4723 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400350 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400356 4723 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400362 4723 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400368 4723 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400373 4723 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400379 4723 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400386 4723 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400393 4723 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400400 4723 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400406 4723 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400413 4723 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400419 4723 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400425 4723 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400431 4723 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400437 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400443 4723 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400448 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400453 4723 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400459 4723 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400464 4723 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400471 4723 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400478 4723 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400484 4723 feature_gate.go:330] unrecognized feature gate: Example Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400490 4723 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400496 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400501 4723 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400507 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400514 4723 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400522 4723 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400528 4723 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400533 4723 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400539 4723 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400545 4723 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400550 4723 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400555 4723 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400561 4723 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400566 4723 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.400576 4723 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400759 4723 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400770 4723 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400776 4723 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400782 4723 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400787 4723 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400792 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400797 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400804 4723 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400809 4723 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400815 4723 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400820 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400825 4723 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400830 4723 
feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400836 4723 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400841 4723 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400846 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400852 4723 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400857 4723 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400864 4723 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400870 4723 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400876 4723 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400882 4723 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400887 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400893 4723 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400898 4723 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400903 4723 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400909 4723 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400914 4723 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400920 4723 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400925 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400931 4723 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400936 4723 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400941 4723 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400947 4723 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400952 4723 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400957 4723 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400962 4723 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400988 4723 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 15:23:09 crc kubenswrapper[4723]: 
W1211 15:23:09.400994 4723 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.400999 4723 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401006 4723 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401012 4723 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401024 4723 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401050 4723 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401057 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401065 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401071 4723 feature_gate.go:330] unrecognized feature gate: Example Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401078 4723 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401088 4723 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401096 4723 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401104 4723 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401139 4723 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401148 4723 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401154 4723 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401161 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401168 4723 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401175 4723 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401183 4723 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401189 4723 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401196 4723 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401202 4723 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401210 4723 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401218 4723 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401226 4723 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401231 4723 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401237 4723 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401243 4723 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401248 4723 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401253 4723 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401258 4723 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.401263 4723 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.401273 4723 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.402208 4723 server.go:940] "Client rotation is on, will bootstrap in background" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.406703 4723 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.406825 4723 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.407465 4723 server.go:997] "Starting client certificate rotation" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.407502 4723 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.407767 4723 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-16 02:00:41.663679898 +0000 UTC Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.407982 4723 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.413381 4723 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.415469 4723 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.415799 4723 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.424221 4723 log.go:25] "Validated CRI v1 runtime API" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.444613 4723 log.go:25] "Validated CRI v1 image API" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.448292 4723 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.452129 4723 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-11-15-18-54-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.452189 4723 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.472419 4723 manager.go:217] Machine: {Timestamp:2025-12-11 15:23:09.471145803 +0000 UTC m=+0.245379258 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:d21492ba-341a-4bd8-a181-6cf6c9f891ab BootID:53ba6dcc-e071-4d76-aa81-b9571675a615 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e4:0e:be Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e4:0e:be Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:be:3d:9e Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:3d:56:7c Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:4e:c2:d0 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:8b:4d:f3 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:d6:bd:6b:57:03:a9 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:3a:bd:80:1b:e9:a8 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.472675 4723 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.472849 4723 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.473559 4723 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.473833 4723 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.473895 4723 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.474191 4723 topology_manager.go:138] "Creating topology manager with none policy" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.474204 4723 
container_manager_linux.go:303] "Creating device plugin manager" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.474457 4723 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.474493 4723 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.474861 4723 state_mem.go:36] "Initialized new in-memory state store" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.475351 4723 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.475942 4723 kubelet.go:418] "Attempting to sync node with API server" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.475982 4723 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.476011 4723 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.476029 4723 kubelet.go:324] "Adding apiserver pod source" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.476042 4723 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.477872 4723 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.478268 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.478320 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.478413 4723 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.478471 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.478385 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479191 4723 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479704 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479734 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479743 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479753 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479767 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479777 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479786 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479800 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479811 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479821 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479834 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.479844 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.480257 4723 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.480914 4723 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.482053 4723 server.go:1280] "Started kubelet" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.482420 4723 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.482442 4723 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 11 
15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.483700 4723 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 11 15:23:09 crc systemd[1]: Started Kubernetes Kubelet. Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.486451 4723 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.486496 4723 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.486688 4723 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 08:53:53.855366737 +0000 UTC Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.487419 4723 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.487442 4723 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.487849 4723 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.487602 4723 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.151:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1880328260e4f81b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 15:23:09.481990171 +0000 UTC m=+0.256223606,LastTimestamp:2025-12-11 15:23:09.481990171 +0000 UTC m=+0.256223606,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.488386 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="200ms" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.489751 4723 server.go:460] "Adding debug handlers to kubelet server" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.490060 4723 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.490182 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.490284 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.498068 4723 factory.go:55] Registering systemd factory Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 
15:23:09.498250 4723 factory.go:221] Registration of the systemd container factory successfully Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.499788 4723 factory.go:153] Registering CRI-O factory Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.499850 4723 factory.go:221] Registration of the crio container factory successfully Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.499958 4723 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.500838 4723 factory.go:103] Registering Raw factory Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.500892 4723 manager.go:1196] Started watching for new ooms in manager Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.501818 4723 manager.go:319] Starting recovery of all containers Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508011 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508088 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508105 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508118 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508130 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508143 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508155 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508167 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 11 15:23:09 crc 
kubenswrapper[4723]: I1211 15:23:09.508187 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508207 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508227 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508242 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508264 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508281 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508298 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508310 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508323 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508361 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.508371 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.509498 4723 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.509517 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.509528 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.509542 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.509555 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.509571 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.509582 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.510223 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.515215 4723 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.515605 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.515719 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 
15:23:09.515809 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.515917 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516221 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516321 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516412 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516508 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516613 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516706 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516817 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.516914 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517051 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517144 
4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517233 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517385 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517496 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517655 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517793 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517807 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517821 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517833 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517845 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517858 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517871 4723 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517928 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.517993 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518015 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518034 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518045 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518057 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518067 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518080 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518091 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518128 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518142 4723 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518159 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518171 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518183 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518195 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518207 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518271 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518297 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518309 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518321 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518360 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518372 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518382 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518407 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518419 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518482 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518497 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518507 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518519 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518574 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518585 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518631 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518658 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518683 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518694 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518705 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518717 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518727 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518739 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518777 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518787 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518812 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518821 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518860 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518869 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518880 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518889 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518899 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518913 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518937 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518946 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.518956 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519123 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519146 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519163 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519192 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519209 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519302 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519325 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519339 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519355 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519367 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519377 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519387 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519412 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519436 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519448 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519500 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519511 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519521 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519530 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519541 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519552 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519611 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519627 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519635 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519645 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519660 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519681 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519724 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519736 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519759 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519769 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519780 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519789 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519799 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519809 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519869 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519879 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519903 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519915 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519930 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519939 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.519948 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520124 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520250 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520280 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520306 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520316 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520325 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520335 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520362 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520411 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520422 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520436 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520466 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520475 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520485 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520494 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520503 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520531 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520540 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520549 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520601 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520611 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520650 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520661 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520676 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520685 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520711 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520721 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520749 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520763 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520842 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520853 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520864 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520893 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520904 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520939 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.520994 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521012 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521025 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521038 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521137 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521151 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521164 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521185 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521237 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521250 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521264 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521276 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521288 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521346 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521377 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521390 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521419 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521432 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521444 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521455 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521481 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521494 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521622 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521645 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521696 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521714 4723 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521728 4723 reconstruct.go:97] "Volume reconstruction finished" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.521739 4723 reconciler.go:26] "Reconciler: start to sync state" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.526740 4723 manager.go:324] Recovery completed Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.542113 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.543739 4723 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.544957 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.545141 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.545152 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.546844 4723 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.546889 4723 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.546924 4723 kubelet.go:2335] "Starting kubelet main sync loop" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.546988 4723 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.547498 4723 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.547529 4723 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.547556 4723 state_mem.go:36] "Initialized new in-memory state store" Dec 11 15:23:09 crc kubenswrapper[4723]: W1211 15:23:09.548088 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.549323 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.590857 4723 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.648062 4723 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.690183 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="400ms" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.691380 4723 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.791937 4723 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.848990 4723 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.892621 4723 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.937499 4723 policy_none.go:49] "None policy: Start" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.940398 4723 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 11 15:23:09 crc kubenswrapper[4723]: I1211 15:23:09.940442 4723 state_mem.go:35] "Initializing new in-memory state store" Dec 11 15:23:09 crc kubenswrapper[4723]: E1211 15:23:09.993771 4723 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 15:23:10 crc kubenswrapper[4723]: 
E1211 15:23:10.092201 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="800ms" Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.094276 4723 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.151328 4723 manager.go:334] "Starting Device Plugin manager" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.151413 4723 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.151428 4723 server.go:79] "Starting device plugin registration server" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.152012 4723 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.152034 4723 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.152204 4723 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.152288 4723 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.152296 4723 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.160001 4723 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.249323 4723 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.249546 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.251385 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.251473 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.251494 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.251721 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.252402 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.252499 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.252501 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.253339 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.253446 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.253495 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.253746 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.253907 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254021 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254370 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254462 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254494 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254534 4723 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254526 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254657 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.254681 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.255501 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.255580 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.255606 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.255636 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.255706 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.255732 4723 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.255725 4723 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.255927 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.256052 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.256073 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258041 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258108 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258124 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258164 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258401 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258512 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258562 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258617 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.258714 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.259820 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.259872 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.259891 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.260101 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.260126 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.260134 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.260237 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.260323 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.261705 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.261758 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.261776 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334157 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334228 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334274 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334305 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334462 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334532 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334567 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334590 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334620 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334668 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334741 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334808 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334903 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.334949 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.335023 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436392 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436461 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436483 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436505 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436527 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436543 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436570 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc 
kubenswrapper[4723]: I1211 15:23:10.436591 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436610 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436626 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436642 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436694 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436748 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436782 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436777 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436828 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436792 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437275 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437498 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437534 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437356 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437577 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437589 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436834 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437670 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436803 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437605 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437628 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.437556 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.436798 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.456023 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.457848 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.457953 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.458043 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.458148 4723 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.458797 4723 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.481655 4723 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.487823 4723 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 06:50:26.512115108 +0000 UTC Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.488098 4723 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 111h27m16.024025582s for next certificate rotation Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.489344 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.489428 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get 
\"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.587176 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.595182 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.609339 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.611553 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-46a43475018bcdebfbaf8d7ea8f6ee4d3704a729745e7ca16fbf08ec6cf05454 WatchSource:0}: Error finding container 46a43475018bcdebfbaf8d7ea8f6ee4d3704a729745e7ca16fbf08ec6cf05454: Status 404 returned error can't find the container with id 46a43475018bcdebfbaf8d7ea8f6ee4d3704a729745e7ca16fbf08ec6cf05454 Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.614667 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-e225d098da1e07b93889c184a7e4a04fe8e08a0fa2a40aabf645faa8e2ddf3ac WatchSource:0}: Error finding container e225d098da1e07b93889c184a7e4a04fe8e08a0fa2a40aabf645faa8e2ddf3ac: Status 404 returned error can't find the container with id e225d098da1e07b93889c184a7e4a04fe8e08a0fa2a40aabf645faa8e2ddf3ac Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.617398 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.623812 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.634013 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.634129 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.680260 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.680362 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.754508 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.754587 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.832024 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1291d9d24e3a12f855df9088f0e52eb1d4f25fee6d49899d419e32b3c8f6030 WatchSource:0}: Error finding container c1291d9d24e3a12f855df9088f0e52eb1d4f25fee6d49899d419e32b3c8f6030: Status 404 returned error can't find the container with id c1291d9d24e3a12f855df9088f0e52eb1d4f25fee6d49899d419e32b3c8f6030 Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.833279 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-335be45b49c8e4894cb11ccffc1889b81dddda1e123148e10b21cae710b6080f WatchSource:0}: Error finding container 335be45b49c8e4894cb11ccffc1889b81dddda1e123148e10b21cae710b6080f: Status 404 returned error can't find the container with id 335be45b49c8e4894cb11ccffc1889b81dddda1e123148e10b21cae710b6080f Dec 11 15:23:10 crc kubenswrapper[4723]: W1211 15:23:10.835117 4723 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9a1504dcde729a3de86fac1277609764ffd582b51b26c17351d1cab0f729a1b9 WatchSource:0}: Error finding container 9a1504dcde729a3de86fac1277609764ffd582b51b26c17351d1cab0f729a1b9: Status 404 returned error can't find the container with id 9a1504dcde729a3de86fac1277609764ffd582b51b26c17351d1cab0f729a1b9 Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.859545 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.860724 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.860757 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.860767 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:10 crc kubenswrapper[4723]: I1211 15:23:10.860791 4723 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.861262 4723 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Dec 11 15:23:10 crc kubenswrapper[4723]: E1211 15:23:10.893343 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="1.6s" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.452558 4723 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 11 15:23:11 crc kubenswrapper[4723]: E1211 15:23:11.453578 4723 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.151:6443: connect: connection refused" logger="UnhandledError" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.482129 4723 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.151:6443: connect: connection refused Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.554272 4723 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2" exitCode=0 Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.554354 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.554455 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c1291d9d24e3a12f855df9088f0e52eb1d4f25fee6d49899d419e32b3c8f6030"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.554598 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.556082 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.556112 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.556122 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.556397 4723 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="43c9f78b1c9e0dd4dc99af5679e7417cf520bb90cde1b10349936bb3fbdd4e9a" exitCode=0 Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.556491 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"43c9f78b1c9e0dd4dc99af5679e7417cf520bb90cde1b10349936bb3fbdd4e9a"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.556577 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e225d098da1e07b93889c184a7e4a04fe8e08a0fa2a40aabf645faa8e2ddf3ac"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.556735 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557675 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557705 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557717 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557736 4723 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="28bb3b5cabde8f044b8728082b51c9203e24944336f2920ab866dc5f1f81c813" exitCode=0 Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557823 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"28bb3b5cabde8f044b8728082b51c9203e24944336f2920ab866dc5f1f81c813"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557857 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"46a43475018bcdebfbaf8d7ea8f6ee4d3704a729745e7ca16fbf08ec6cf05454"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557900 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.557913 4723 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.561431 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.561489 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.561499 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.561466 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.561905 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.561918 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.564474 4723 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="8d626de27959dbecccfbbb9611b2b2eb340bbeaba60db184063f208f88a13af1" exitCode=0 Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.564549 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"8d626de27959dbecccfbbb9611b2b2eb340bbeaba60db184063f208f88a13af1"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.564580 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9a1504dcde729a3de86fac1277609764ffd582b51b26c17351d1cab0f729a1b9"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.564683 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.565877 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.565936 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.565949 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.567028 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"90f4d5e38cec9da43bfd952a821acb47cf0134d9f5d47656bed0c5159876e42b"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.567075 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"335be45b49c8e4894cb11ccffc1889b81dddda1e123148e10b21cae710b6080f"} Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.661684 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.665531 
4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.665587 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.665599 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:11 crc kubenswrapper[4723]: I1211 15:23:11.665633 4723 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 15:23:11 crc kubenswrapper[4723]: E1211 15:23:11.670854 4723 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.151:6443: connect: connection refused" node="crc" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.573920 4723 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="94e471c3317a70b9c510040eb3cc6eb2d0d79b6f775ba38adaf1f98a8cde47cd" exitCode=0 Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.574019 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"94e471c3317a70b9c510040eb3cc6eb2d0d79b6f775ba38adaf1f98a8cde47cd"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.574275 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.578990 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.579045 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.579061 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.580717 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"855eae118e9922e98a1bde24473d836a8b1259dae495718ce2d3acb981a76f0c"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.580846 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.582233 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.582284 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.582300 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.584405 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4ec97a542b9f7c2ca32ef2f97aa50739553eb6247d221aa8fdf32a4571b6c06c"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.584465 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"79cdab3595f8dcbff37c659acff083d972d7c8ed24d5530bf6b23e20a5e3c4a4"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.584483 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c4cdbf4fc7cac3cac6dacb044e7c64c0989c086aedc59385cabadb71a4d9a2c8"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.584615 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.585713 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.585751 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.585765 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.588835 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"34c3cf94f3ba1cb85776bc160c1f958d5e2ac65fbaf7cc6072a8c746a8f616f5"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.588872 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bccd608356abe291ba5a53ea7e35387fe291c6b269806381ec5fd5694cde3e36"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.588886 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0995457ef833d6f5b4bbc321a2ba051956352e9d85b06c2e0c11dd0c3aea1849"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.588994 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.589941 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.590000 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.590013 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.605320 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.605378 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c"} Dec 11 15:23:12 crc 
kubenswrapper[4723]: I1211 15:23:12.605396 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.605410 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865"} Dec 11 15:23:12 crc kubenswrapper[4723]: I1211 15:23:12.766018 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.271745 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.273164 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.273229 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.273243 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.273281 4723 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.523912 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.612071 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5"} Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.612157 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.613194 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.613233 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.613245 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.615497 4723 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f4f68ffde32c7b85a6df9b3a82c625a2fcbbacd44c8b98ef0d2b3b6fa525429f" exitCode=0 Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.615571 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f4f68ffde32c7b85a6df9b3a82c625a2fcbbacd44c8b98ef0d2b3b6fa525429f"} Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.615608 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 
15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.615788 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.616672 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.616701 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.616711 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.616714 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.616740 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.616750 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:13 crc kubenswrapper[4723]: I1211 15:23:13.807761 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.619888 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"71a5ecacfbf777cfd25e0b003678a6d30d2de72a3731d3fb1e20b1a0d95973f0"} Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.619938 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"db46e22e3deac155e39e113a6af5ef3563543ab0d21c8fc4589964c84eec06e3"} Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.619961 4723 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.620020 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.620034 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.620874 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.620902 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.620914 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.620985 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.621032 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:14 crc kubenswrapper[4723]: I1211 15:23:14.621042 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.519190 4723 
certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.630739 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f92ab08794bc167d2c4348e51504bda32ffeac73c3771f4f315e91c0b6a48693"} Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.630807 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dc404f80336e28fbda764074124a71793a223f5c386abf6ca254c9575ab66692"} Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.630826 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e877b2a85cf8934ff35d79a7746b27a5aeb07c87c41bb3f2d728b5210066d1f0"} Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.630848 4723 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.630921 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.630948 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.632865 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.632898 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.632909 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.633155 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.633219 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:15 crc kubenswrapper[4723]: I1211 15:23:15.633247 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.410365 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.410646 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.412303 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.412385 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.412419 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.416672 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.524780 4723 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.524915 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.633049 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.633063 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.634349 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.634389 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.634401 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.634791 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.634823 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.634834 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:16 crc kubenswrapper[4723]: I1211 15:23:16.998183 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 11 15:23:17 crc kubenswrapper[4723]: I1211 15:23:17.636107 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:17 crc kubenswrapper[4723]: I1211 15:23:17.637354 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:17 crc kubenswrapper[4723]: I1211 15:23:17.637463 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:17 crc kubenswrapper[4723]: I1211 15:23:17.637502 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.233178 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.233431 4723 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.233486 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.235286 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.235320 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.235332 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.983618 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.983816 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.984951 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.985006 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:18 crc kubenswrapper[4723]: I1211 15:23:18.985019 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:20 crc kubenswrapper[4723]: E1211 15:23:20.160384 4723 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.743144 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.743439 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.745884 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.746019 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.746046 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.829487 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.829708 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.831046 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.831123 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:20 crc kubenswrapper[4723]: I1211 15:23:20.831142 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.020091 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 11 15:23:22 crc kubenswrapper[4723]: 
I1211 15:23:22.020335 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.021666 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.021741 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.021756 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:22 crc kubenswrapper[4723]: W1211 15:23:22.404390 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.405030 4723 trace.go:236] Trace[1744509771]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 15:23:12.403) (total time: 10001ms): Dec 11 15:23:22 crc kubenswrapper[4723]: Trace[1744509771]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:23:22.404) Dec 11 15:23:22 crc kubenswrapper[4723]: Trace[1744509771]: [10.001717071s] [10.001717071s] END Dec 11 15:23:22 crc kubenswrapper[4723]: E1211 15:23:22.405067 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.482781 4723 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 11 15:23:22 crc kubenswrapper[4723]: E1211 15:23:22.495560 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 11 15:23:22 crc kubenswrapper[4723]: E1211 15:23:22.525727 4723 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.1880328260e4f81b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 15:23:09.481990171 +0000 UTC m=+0.256223606,LastTimestamp:2025-12-11 15:23:09.481990171 +0000 UTC m=+0.256223606,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 15:23:22 crc kubenswrapper[4723]: W1211 15:23:22.719257 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: 
failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.719404 4723 trace.go:236] Trace[1766095699]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 15:23:12.718) (total time: 10001ms): Dec 11 15:23:22 crc kubenswrapper[4723]: Trace[1766095699]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:23:22.719) Dec 11 15:23:22 crc kubenswrapper[4723]: Trace[1766095699]: [10.001254072s] [10.001254072s] END Dec 11 15:23:22 crc kubenswrapper[4723]: E1211 15:23:22.719439 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.770692 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.770891 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.772401 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.772456 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.772469 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:22 crc kubenswrapper[4723]: W1211 15:23:22.824261 4723 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 15:23:22 crc kubenswrapper[4723]: I1211 15:23:22.824370 4723 trace.go:236] Trace[826483795]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 15:23:12.822) (total time: 10002ms): Dec 11 15:23:22 crc kubenswrapper[4723]: Trace[826483795]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:23:22.824) Dec 11 15:23:22 crc kubenswrapper[4723]: Trace[826483795]: [10.002065439s] [10.002065439s] END Dec 11 15:23:22 crc kubenswrapper[4723]: E1211 15:23:22.824396 4723 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 15:23:23 crc kubenswrapper[4723]: E1211 15:23:23.274773 4723 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 11 15:23:23 crc kubenswrapper[4723]: I1211 15:23:23.539780 
4723 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 11 15:23:23 crc kubenswrapper[4723]: I1211 15:23:23.539856 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 11 15:23:23 crc kubenswrapper[4723]: I1211 15:23:23.544806 4723 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 11 15:23:23 crc kubenswrapper[4723]: I1211 15:23:23.544877 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 11 15:23:23 crc kubenswrapper[4723]: I1211 15:23:23.813443 4723 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]log ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]etcd ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/openshift.io-api-request-count-filter ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/openshift.io-startkubeinformers ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/generic-apiserver-start-informers ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/priority-and-fairness-config-consumer ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/priority-and-fairness-filter ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-apiextensions-informers ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-apiextensions-controllers ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/crd-informer-synced ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-system-namespaces-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-cluster-authentication-info-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Dec 11 
15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-legacy-token-tracking-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-service-ip-repair-controllers ok Dec 11 15:23:23 crc kubenswrapper[4723]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Dec 11 15:23:23 crc kubenswrapper[4723]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/priority-and-fairness-config-producer ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/bootstrap-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/start-kube-aggregator-informers ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/apiservice-status-local-available-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/apiservice-status-remote-available-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/apiservice-registration-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/apiservice-wait-for-first-sync ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/apiservice-discovery-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/kube-apiserver-autoregistration ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]autoregister-completion ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/apiservice-openapi-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: [+]poststarthook/apiservice-openapiv3-controller ok Dec 11 15:23:23 crc kubenswrapper[4723]: livez check failed Dec 11 15:23:23 crc kubenswrapper[4723]: I1211 15:23:23.813515 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.242728 4723 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.432268 4723 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.475501 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.477046 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.477084 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.477094 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.477116 4723 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 15:23:26 crc kubenswrapper[4723]: E1211 15:23:26.480487 4723 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.485858 4723 apiserver.go:52] "Watching apiserver" Dec 11 15:23:26 crc 
kubenswrapper[4723]: I1211 15:23:26.489412 4723 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.489787 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.490223 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.490276 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:26 crc kubenswrapper[4723]: E1211 15:23:26.490309 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.490424 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.490486 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:26 crc kubenswrapper[4723]: E1211 15:23:26.490615 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.490916 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.490927 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:26 crc kubenswrapper[4723]: E1211 15:23:26.491124 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.492510 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.492737 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.495094 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.496029 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.496205 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.496676 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.496716 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.496854 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.496870 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.524552 4723 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.524712 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.535394 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.549958 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.562654 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.574057 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.584156 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.588682 4723 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.595084 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.609523 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:26 crc kubenswrapper[4723]: I1211 15:23:26.625353 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.543626 4723 trace.go:236] Trace[2029715716]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 15:23:13.860) (total time: 14682ms): Dec 11 15:23:28 crc kubenswrapper[4723]: Trace[2029715716]: ---"Objects listed" error: 14682ms (15:23:28.543) Dec 11 15:23:28 crc kubenswrapper[4723]: Trace[2029715716]: [14.682880857s] [14.682880857s] END Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.543672 4723 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.545257 4723 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.547714 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.547881 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.547718 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.547714 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.548107 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.548168 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.560894 4723 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.584615 4723 csr.go:261] certificate signing request csr-p8lpl is approved, waiting to be issued Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.585741 4723 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45936->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.585814 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45936->192.168.126.11:17697: read: connection reset by peer" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.597361 4723 csr.go:257] certificate signing request csr-p8lpl is issued Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.598573 4723 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647020 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647088 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647113 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647142 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 
15:23:28.647163 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647184 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647204 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647229 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647261 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647281 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647303 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647326 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647345 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647365 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 15:23:28 crc 
kubenswrapper[4723]: I1211 15:23:28.647389 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647437 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647455 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647486 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647509 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647530 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647549 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647580 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647600 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647627 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 15:23:28 crc 
kubenswrapper[4723]: I1211 15:23:28.647653 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647680 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647700 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647722 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647746 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647767 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647789 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647809 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647829 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647851 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" 
(UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647875 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647897 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647918 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647944 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647962 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.647997 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648021 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648041 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648059 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648081 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648099 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648129 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648149 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648338 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648421 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648488 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648522 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648544 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648563 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648603 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648642 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648671 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648706 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648779 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648812 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648842 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648863 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648880 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648898 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648924 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.648950 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649029 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649051 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649073 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649102 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649148 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649168 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649190 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649218 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649237 4723 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649255 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649275 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649292 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649310 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649329 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649399 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649418 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649437 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649458 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 15:23:28 crc kubenswrapper[4723]: 
I1211 15:23:28.649479 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649497 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649515 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649537 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649559 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649576 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649599 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649618 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649638 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649657 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649677 
4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649701 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649722 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649743 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649760 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649784 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649805 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649825 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649846 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649866 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 15:23:28 crc 
kubenswrapper[4723]: I1211 15:23:28.649883 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649902 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649920 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649941 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649961 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.649998 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650020 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650039 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650060 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650078 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650097 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650115 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650133 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650152 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650170 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650188 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650206 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650225 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650244 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650273 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650293 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650312 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650330 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650348 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650366 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650387 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650415 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650438 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650464 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650482 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650499 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650519 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650537 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650561 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650580 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650600 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650623 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650644 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650662 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650681 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650702 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650684 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650721 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650873 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650914 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.650945 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651000 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651036 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651122 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 15:23:28 crc 
kubenswrapper[4723]: I1211 15:23:28.651160 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651187 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651197 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651217 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651250 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651278 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651311 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651348 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651375 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651411 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651440 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651468 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651492 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651515 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651545 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651567 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651591 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651618 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651646 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651674 4723 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651700 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651729 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651754 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651781 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651810 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651839 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651864 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651890 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651917 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 15:23:28 
crc kubenswrapper[4723]: I1211 15:23:28.651944 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.651993 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652019 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652043 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652069 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652099 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652123 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652150 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652173 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652199 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 
15:23:28.652225 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652253 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652280 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652305 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652329 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652353 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652417 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652448 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652485 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652517 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: 
\"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652543 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652571 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652602 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652655 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652681 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652712 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652746 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652777 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652806 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652831 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652913 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.652930 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.653170 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.653406 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:29.153372092 +0000 UTC m=+19.927605707 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.653532 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.653658 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.653658 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.653574 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.653796 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.653873 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.654013 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.654267 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.654307 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.654335 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.654392 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.654584 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.654948 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.655058 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.655071 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.655114 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.655278 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.655426 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.655767 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656031 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656054 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656333 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656417 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656508 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656592 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656820 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656850 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.656933 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.657012 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.657003 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.657231 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.657242 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.657317 4723 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.657374 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.658599 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.658786 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.658863 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.658925 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659068 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659160 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659259 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659463 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659571 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659658 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659697 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659732 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.659930 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660174 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660240 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660356 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660418 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660519 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660686 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660775 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.660910 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.661267 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.661455 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.661475 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.661499 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.661711 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.661908 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.662013 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.662350 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.662432 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.662469 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.663012 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.663048 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.663079 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.663020 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.663306 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.663463 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.664880 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.665992 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.665996 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.666397 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.666579 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.666621 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.666687 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.666941 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.667220 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.667341 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.667380 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.667646 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.667771 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.667802 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.667900 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.668123 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.668175 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.668205 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.668426 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.668492 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.668576 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.668932 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.669041 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.669165 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.669228 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.669362 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.669379 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.669429 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.670556 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.670749 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). 
InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.671093 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.671951 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.672102 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.671899 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.672441 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.672382 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.672534 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.671902 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.672705 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.672681 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673009 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673091 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673188 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673285 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673576 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673708 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673829 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.673837 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.674089 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.674314 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.674345 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.674599 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.674829 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.674915 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.675120 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.675482 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.675717 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.675746 4723 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.675864 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:29.175834423 +0000 UTC m=+19.950067858 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.676356 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.677105 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.677380 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.677687 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.678808 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.678814 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.678923 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.679122 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.679437 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.675749 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.679871 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.680155 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.680831 4723 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.681623 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.682035 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.684371 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.693507 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.693504 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.680502 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.868910 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.869169 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.869563 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.870032 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.680713 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.872023 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.873040 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.873777 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.874840 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.682199 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.881633 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.881634 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.881903 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.881933 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882023 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882111 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882198 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882298 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882400 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882427 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882745 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882767 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882776 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882822 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882926 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.882930 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883044 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883080 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883105 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883141 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883269 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883336 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883320 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883449 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883553 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883632 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.883658 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.883884 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.883999 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.884097 4723 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.884003 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.884293 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.884369 4723 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884511 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884606 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884724 4723 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884748 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884761 4723 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884777 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884790 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884802 4723 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884815 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.884827 4723 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.885156 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.885728 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.874577 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.885953 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.886326 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.886540 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:29.386512589 +0000 UTC m=+20.160746204 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.886683 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.886709 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.886893 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887179 4723 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.887166 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:29.387138246 +0000 UTC m=+20.161371751 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887220 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887250 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887268 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: E1211 15:23:28.887300 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:29.387275649 +0000 UTC m=+20.161509084 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887317 4723 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887335 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887350 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887364 4723 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887374 4723 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887386 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887396 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887406 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887416 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887425 4723 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887434 4723 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887444 4723 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887453 4723 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887465 4723 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887474 4723 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887483 4723 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887493 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887502 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887511 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887521 4723 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887528 4723 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887539 4723 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887547 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887556 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887565 4723 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887574 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887583 4723 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887591 4723 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887600 4723 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887609 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887618 4723 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887626 4723 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887636 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887648 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887659 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887670 4723 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887680 4723 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887690 4723 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887699 4723 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887709 4723 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887719 4723 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887727 4723 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887737 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887746 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887755 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887764 4723 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887774 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887782 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887791 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887799 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887808 4723 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887817 4723 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887826 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887836 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887846 4723 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887855 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887864 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887874 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887883 4723 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887892 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887902 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887913 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887923 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887933 4723 
reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887941 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887951 4723 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887960 4723 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887987 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887996 4723 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888005 4723 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888013 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888022 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888031 4723 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888040 4723 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888050 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888061 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888079 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888088 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888096 4723 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888104 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888113 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888122 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888131 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888139 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888148 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888145 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888156 4723 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888211 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888221 4723 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: 
I1211 15:23:28.888232 4723 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888241 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888250 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888260 4723 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888269 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888278 4723 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888287 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888296 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888305 4723 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888314 4723 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888325 4723 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888336 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888346 4723 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888354 4723 reconciler_common.go:293] 
"Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888364 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888373 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888383 4723 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888393 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888403 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888413 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888423 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888422 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887621 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.887921 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888433 4723 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888494 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888511 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888527 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888541 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888557 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888569 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888580 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888593 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888606 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888621 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888634 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888647 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888660 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888674 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888686 4723 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888699 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888713 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888726 4723 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888738 4723 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888751 4723 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888767 4723 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888780 4723 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888793 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888806 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888819 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") 
on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888831 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888844 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888857 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888869 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888882 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888895 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888909 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888944 4723 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888959 4723 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.888991 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889006 4723 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889019 4723 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889032 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: 
\"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889045 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889059 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889072 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889085 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889098 4723 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889112 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889124 4723 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889136 4723 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889148 4723 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889160 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889172 4723 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889186 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889197 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" 
(UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889212 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889224 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889236 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889248 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889280 4723 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889365 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.886260 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.889447 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.890523 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.891694 4723 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.891733 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.891890 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.899545 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.905097 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.905900 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.907336 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.913359 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.918538 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.921805 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.924627 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:23:28 crc kubenswrapper[4723]: W1211 15:23:28.927597 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-7cf8e62c9f4c53921f669f62faae4d4e76d5b149d91c4a5eb5478d2d43b5877f WatchSource:0}: Error finding container 7cf8e62c9f4c53921f669f62faae4d4e76d5b149d91c4a5eb5478d2d43b5877f: Status 404 returned error can't find the container with id 7cf8e62c9f4c53921f669f62faae4d4e76d5b149d91c4a5eb5478d2d43b5877f Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.931171 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.946674 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.960465 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.973949 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.983385 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.984691 4723 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.984768 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990511 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990536 4723 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990551 4723 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990565 4723 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990578 4723 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990590 4723 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990603 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 11 
15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990613 4723 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990623 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990633 4723 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990645 4723 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990653 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990662 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990672 4723 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.990680 4723 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 15:23:28 crc kubenswrapper[4723]: I1211 15:23:28.997478 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.007788 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.029864 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.042117 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.055593 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878d079-6be7-4e08-8d65-1795bb539c61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.069018 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.078600 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.192166 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.192295 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.192406 4723 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.192437 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:30.192393002 +0000 UTC m=+20.966626437 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.192498 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:30.192488654 +0000 UTC m=+20.966722089 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.395006 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.395086 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.395122 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395303 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395345 4723 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395501 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:30.395459734 +0000 UTC m=+21.169693179 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395367 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395554 4723 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395315 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395635 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395656 4723 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395639 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:30.395609998 +0000 UTC m=+21.169843463 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.395740 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:30.395709751 +0000 UTC m=+21.169943186 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.408869 4723 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 11 15:23:29 crc kubenswrapper[4723]: W1211 15:23:29.409573 4723 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 11 15:23:29 crc kubenswrapper[4723]: W1211 15:23:29.409643 4723 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.410437 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Post \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases?timeout=10s\": read tcp 38.102.83.151:60112->38.102.83.151:6443: use of closed network connection" interval="6.4s" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.817351 4723 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-11 15:18:28 +0000 UTC, rotation deadline is 2026-09-22 20:28:35.683191478 +0000 UTC Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.817600 4723 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6845h5m5.865600061s for next certificate rotation Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.821009 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.821889 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.823666 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.824687 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.825876 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.826949 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 
15:23:29.832862 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.835510 4723 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.835615 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.836794 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.838256 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.840789 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.842006 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.842580 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878d079-6be7-4e08-8d65-1795bb539c61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.844634 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: E1211 15:23:29.845299 4723 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.846020 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.847235 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.849279 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.850387 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.852501 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.853389 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.854817 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.856360 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.857423 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.861607 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.861626 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.863679 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.868026 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.868999 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.871520 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.872638 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.873311 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.876046 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.876688 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.878468 4723 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.878741 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.879530 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.881733 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.883603 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.884749 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.893954 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: 
I1211 15:23:29.896055 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.897308 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.900544 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.902060 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.902841 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.903913 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.904713 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.907167 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.907845 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.908763 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.909399 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.910464 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.911187 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.912081 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.912625 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.913120 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.914062 4723 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.914634 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.915888 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.916511 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1f4ede8cbbe963385ad4113d6e24a75e0152244401bc3be3e10ba63cc93c8500"} Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.916545 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c140d254e74f66abd9b337d4b3c3de3f4f709bc3f58130b4446327e2f61f6ece"} Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.916561 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7cf8e62c9f4c53921f669f62faae4d4e76d5b149d91c4a5eb5478d2d43b5877f"} Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.922322 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.938112 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:29 crc kubenswrapper[4723]: I1211 15:23:29.965634 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.223664 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.223921 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:32.223871825 +0000 UTC m=+22.998105260 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.224170 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.224364 4723 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.224466 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:32.224453311 +0000 UTC m=+22.998686916 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.426036 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.426110 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.426151 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426237 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426274 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426286 4723 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426287 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426312 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426324 4723 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426348 4723 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:32.426331821 +0000 UTC m=+23.200565256 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426372 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:32.426359052 +0000 UTC m=+23.200592487 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426489 4723 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.426646 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:32.426612479 +0000 UTC m=+23.200846064 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.547963 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.548011 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.548025 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.548159 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.548273 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:30 crc kubenswrapper[4723]: E1211 15:23:30.548448 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.839983 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a218cff81e9c5bea2cfe7c7324625e108d28e658e45f470f8e4d539c7f7792de"} Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.842015 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7b4ec1ca2909b77f08fb98be93136f1ac6037bf3ca5cfa42b5b19989c19d7cbd"} Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.842073 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"dc5240911746f51523759712abf6f0e72a0d62056bdeff1ccf8a69f9c83c3b5c"} Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.844264 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.845668 4723 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5" exitCode=255 Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.845708 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5"} Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.846558 4723 scope.go:117] "RemoveContainer" containerID="2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.893941 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878d079-6be7-4e08-8d65-1795bb539c61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:30Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.953890 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a218cff81e9c5bea2cfe7c7324625e108d28e658e45f470f8e4d539c7f7792de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:30Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.975241 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:30Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:30 crc kubenswrapper[4723]: I1211 15:23:30.995009 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:30Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.020147 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.039558 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.059681 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.077352 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878d079-6be7-4e08-8d65-1795bb539c61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T15:23:30Z\\\",\\\"message\\\":\\\"C_SHA256' detected.\\\\nW1211 15:23:28.573171 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 15:23:28.573175 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 15:23:28.573178 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 15:23:28.573181 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 15:23:28.576942 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2384301709/tls.crt::/tmp/serving-cert-2384301709/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765466592\\\\\\\\\\\\\\\" (2025-12-11 15:23:12 +0000 UTC to 2026-01-10 15:23:13 +0000 UTC (now=2025-12-11 15:23:28.576904546 +0000 UTC))\\\\\\\"\\\\nI1211 15:23:28.576979 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1211 15:23:28.577056 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1211 15:23:28.577142 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1211 15:23:28.577177 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1211 15:23:28.577203 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1211 15:23:28.577207 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1211 15:23:28.577218 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2384301709/tls.crt::/tmp/serving-cert-2384301709/tls.key\\\\\\\"\\\\nF1211 15:23:28.577322 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.109331 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a218cff81e9c5bea2cfe7c7324625e108d28e658e45f470f8e4d539c7f7792de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.124654 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.138879 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.152127 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-hpc9f"] Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.152616 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.155358 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.155365 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.155495 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-j85c2"] Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.155508 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.155872 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.155988 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.156231 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-bxzdh"] Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.156433 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.156461 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-2v4p2"] Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.156620 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.156800 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.160369 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161001 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161017 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161268 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161324 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161333 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161553 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161498 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161675 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.161797 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.168845 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b4ec1ca2909b77f08fb98be93136f1ac6037bf3ca5cfa42b5b19989c19d7cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5240911746f51523759712abf6f0e72a0d62056bdeff1ccf8a69f9c83c3b5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.190278 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.212011 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.228900 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpc9f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8923b3a7-6d56-4fb6-b496-b718ea3a2071\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bk97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpc9f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.241118 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2v4p2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2062e88-d88f-4e28-abee-8ca69fd16bba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2k6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2v4p2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.255628 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b4ec1ca2909b77f08fb98be93136f1ac6037bf3ca5cfa42b5b19989c19d7cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5240911746f51523759712abf6f0e72a0d62056bdeff1ccf8a69f9c83c3b5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.270342 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.286402 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.299122 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e86455ee-3aa9-411e-b46a-ab60dcc77f95\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bbpd6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bbpd6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bxzdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.315494 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878d079-6be7-4e08-8d65-1795bb539c61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T15:23:30Z\\\",\\\"message\\\":\\\"C_SHA256' detected.\\\\nW1211 15:23:28.573171 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 15:23:28.573175 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 15:23:28.573178 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 15:23:28.573181 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 15:23:28.576942 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2384301709/tls.crt::/tmp/serving-cert-2384301709/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765466592\\\\\\\\\\\\\\\" (2025-12-11 15:23:12 +0000 UTC to 2026-01-10 15:23:13 +0000 UTC (now=2025-12-11 15:23:28.576904546 +0000 UTC))\\\\\\\"\\\\nI1211 15:23:28.576979 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1211 15:23:28.577056 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1211 15:23:28.577142 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1211 15:23:28.577177 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1211 15:23:28.577203 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1211 15:23:28.577207 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1211 15:23:28.577218 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2384301709/tls.crt::/tmp/serving-cert-2384301709/tls.key\\\\\\\"\\\\nF1211 15:23:28.577322 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.329569 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a218cff81e9c5bea2cfe7c7324625e108d28e658e45f470f8e4d539c7f7792de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333387 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-multus-certs\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333420 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-socket-dir-parent\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333440 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-daemon-config\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333464 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bk97\" (UniqueName: 
\"kubernetes.io/projected/8923b3a7-6d56-4fb6-b496-b718ea3a2071-kube-api-access-2bk97\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333484 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e2062e88-d88f-4e28-abee-8ca69fd16bba-hosts-file\") pod \"node-resolver-2v4p2\" (UID: \"e2062e88-d88f-4e28-abee-8ca69fd16bba\") " pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333506 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8923b3a7-6d56-4fb6-b496-b718ea3a2071-cni-binary-copy\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333527 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e86455ee-3aa9-411e-b46a-ab60dcc77f95-rootfs\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333544 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-kubelet\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333567 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-conf-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333601 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-netns\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333622 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-cni-multus\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333645 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6bbp\" (UniqueName: \"kubernetes.io/projected/579cfad2-6e0a-4e52-b84d-0d17b6261f89-kube-api-access-s6bbp\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333669 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"hostroot\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-hostroot\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333687 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-etc-kubernetes\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333706 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333725 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-os-release\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333749 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e86455ee-3aa9-411e-b46a-ab60dcc77f95-mcd-auth-proxy-config\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333768 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-os-release\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333789 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-system-cni-dir\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333826 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e86455ee-3aa9-411e-b46a-ab60dcc77f95-proxy-tls\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333844 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-k8s-cni-cncf-io\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc 
kubenswrapper[4723]: I1211 15:23:31.333872 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbpd6\" (UniqueName: \"kubernetes.io/projected/e86455ee-3aa9-411e-b46a-ab60dcc77f95-kube-api-access-bbpd6\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333892 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-system-cni-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333912 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-cni-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333933 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cnibin\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333953 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cni-binary-copy\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.333989 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-tuning-conf-dir\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.334014 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2k6d\" (UniqueName: \"kubernetes.io/projected/e2062e88-d88f-4e28-abee-8ca69fd16bba-kube-api-access-f2k6d\") pod \"node-resolver-2v4p2\" (UID: \"e2062e88-d88f-4e28-abee-8ca69fd16bba\") " pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.334034 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-cnibin\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.334055 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-cni-bin\") pod \"multus-hpc9f\" (UID: 
\"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.343401 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.359743 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.389908 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"579cfad2-6e0a-4e52-b84d-0d17b6261f89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j85c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.434895 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e2062e88-d88f-4e28-abee-8ca69fd16bba-hosts-file\") pod \"node-resolver-2v4p2\" (UID: \"e2062e88-d88f-4e28-abee-8ca69fd16bba\") " pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.434954 4723 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8923b3a7-6d56-4fb6-b496-b718ea3a2071-cni-binary-copy\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.434996 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e86455ee-3aa9-411e-b46a-ab60dcc77f95-rootfs\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435018 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-kubelet\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435046 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-conf-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435083 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-netns\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435164 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-kubelet\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435178 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e2062e88-d88f-4e28-abee-8ca69fd16bba-hosts-file\") pod \"node-resolver-2v4p2\" (UID: \"e2062e88-d88f-4e28-abee-8ca69fd16bba\") " pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435208 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e86455ee-3aa9-411e-b46a-ab60dcc77f95-rootfs\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435198 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-cni-multus\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435228 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-netns\") pod 
\"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435227 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-cni-multus\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435170 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-conf-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435322 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6bbp\" (UniqueName: \"kubernetes.io/projected/579cfad2-6e0a-4e52-b84d-0d17b6261f89-kube-api-access-s6bbp\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435362 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-hostroot\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435399 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-etc-kubernetes\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435431 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435464 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-os-release\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435478 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-hostroot\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435490 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-etc-kubernetes\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 
15:23:31.435501 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e86455ee-3aa9-411e-b46a-ab60dcc77f95-mcd-auth-proxy-config\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435564 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-os-release\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435598 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-system-cni-dir\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435686 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e86455ee-3aa9-411e-b46a-ab60dcc77f95-proxy-tls\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435715 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-k8s-cni-cncf-io\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435751 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-system-cni-dir\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435784 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbpd6\" (UniqueName: \"kubernetes.io/projected/e86455ee-3aa9-411e-b46a-ab60dcc77f95-kube-api-access-bbpd6\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435813 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-system-cni-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435847 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-cni-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435878 4723 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cnibin\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435911 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cni-binary-copy\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435936 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8923b3a7-6d56-4fb6-b496-b718ea3a2071-cni-binary-copy\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435943 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-tuning-conf-dir\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.435992 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-os-release\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436025 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-os-release\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436057 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-system-cni-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436020 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2k6d\" (UniqueName: \"kubernetes.io/projected/e2062e88-d88f-4e28-abee-8ca69fd16bba-kube-api-access-f2k6d\") pod \"node-resolver-2v4p2\" (UID: \"e2062e88-d88f-4e28-abee-8ca69fd16bba\") " pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436077 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-k8s-cni-cncf-io\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436095 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-cnibin\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436119 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-cni-bin\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436145 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-multus-certs\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436173 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-socket-dir-parent\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436196 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-daemon-config\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436218 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436220 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bk97\" (UniqueName: \"kubernetes.io/projected/8923b3a7-6d56-4fb6-b496-b718ea3a2071-kube-api-access-2bk97\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436302 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-var-lib-cni-bin\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436369 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-tuning-conf-dir\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436384 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-host-run-multus-certs\") pod \"multus-hpc9f\" (UID: 
\"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436737 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-socket-dir-parent\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436751 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cnibin\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.436909 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-cnibin\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.437055 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-cni-dir\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.437650 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/579cfad2-6e0a-4e52-b84d-0d17b6261f89-cni-binary-copy\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.437788 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8923b3a7-6d56-4fb6-b496-b718ea3a2071-multus-daemon-config\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.440863 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e86455ee-3aa9-411e-b46a-ab60dcc77f95-mcd-auth-proxy-config\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.442958 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e86455ee-3aa9-411e-b46a-ab60dcc77f95-proxy-tls\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.465054 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2k6d\" (UniqueName: \"kubernetes.io/projected/e2062e88-d88f-4e28-abee-8ca69fd16bba-kube-api-access-f2k6d\") pod \"node-resolver-2v4p2\" (UID: \"e2062e88-d88f-4e28-abee-8ca69fd16bba\") " pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 
15:23:31.468986 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbpd6\" (UniqueName: \"kubernetes.io/projected/e86455ee-3aa9-411e-b46a-ab60dcc77f95-kube-api-access-bbpd6\") pod \"machine-config-daemon-bxzdh\" (UID: \"e86455ee-3aa9-411e-b46a-ab60dcc77f95\") " pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.469559 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bk97\" (UniqueName: \"kubernetes.io/projected/8923b3a7-6d56-4fb6-b496-b718ea3a2071-kube-api-access-2bk97\") pod \"multus-hpc9f\" (UID: \"8923b3a7-6d56-4fb6-b496-b718ea3a2071\") " pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.470159 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6bbp\" (UniqueName: \"kubernetes.io/projected/579cfad2-6e0a-4e52-b84d-0d17b6261f89-kube-api-access-s6bbp\") pod \"multus-additional-cni-plugins-j85c2\" (UID: \"579cfad2-6e0a-4e52-b84d-0d17b6261f89\") " pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.477985 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-j85c2" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.488735 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.497337 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-2v4p2" Dec 11 15:23:31 crc kubenswrapper[4723]: W1211 15:23:31.508272 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode86455ee_3aa9_411e_b46a_ab60dcc77f95.slice/crio-208118f0b66ea1a409807f429c41bd83bd4b00bf30c24dea5773457ecee65cfd WatchSource:0}: Error finding container 208118f0b66ea1a409807f429c41bd83bd4b00bf30c24dea5773457ecee65cfd: Status 404 returned error can't find the container with id 208118f0b66ea1a409807f429c41bd83bd4b00bf30c24dea5773457ecee65cfd Dec 11 15:23:31 crc kubenswrapper[4723]: W1211 15:23:31.520024 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2062e88_d88f_4e28_abee_8ca69fd16bba.slice/crio-de79d2f6fa7485cf2a8e35e303d4baf848fb14f26e321451c2794314968992b2 WatchSource:0}: Error finding container de79d2f6fa7485cf2a8e35e303d4baf848fb14f26e321451c2794314968992b2: Status 404 returned error can't find the container with id de79d2f6fa7485cf2a8e35e303d4baf848fb14f26e321451c2794314968992b2 Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.530879 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j6xw5"] Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.534488 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.537316 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.537507 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.541143 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.541701 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.541998 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.542141 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.542359 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544467 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544507 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-log-socket\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544532 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-slash\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544548 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-etc-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544565 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-script-lib\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544584 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" 
(UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-env-overrides\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544604 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-bin\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544621 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-kubelet\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544649 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-systemd\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544665 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-ovn\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544680 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-config\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544707 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-var-lib-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544724 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/485d782b-f4ea-4a0f-8e25-66b50577addf-ovn-node-metrics-cert\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544741 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-node-log\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544759 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-systemd-units\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544775 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-netns\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544792 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-ovn-kubernetes\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544810 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-netd\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544829 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.544847 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhnk2\" (UniqueName: \"kubernetes.io/projected/485d782b-f4ea-4a0f-8e25-66b50577addf-kube-api-access-lhnk2\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.554662 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e86455ee-3aa9-411e-b46a-ab60dcc77f95\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bbpd6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bbpd6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-bxzdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.571511 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d878d079-6be7-4e08-8d65-1795bb539c61\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11
T15:23:30Z\\\",\\\"message\\\":\\\"C_SHA256' detected.\\\\nW1211 15:23:28.573171 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 15:23:28.573175 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 15:23:28.573178 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 15:23:28.573181 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1211 15:23:28.576942 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2384301709/tls.crt::/tmp/serving-cert-2384301709/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1765466592\\\\\\\\\\\\\\\" (2025-12-11 15:23:12 +0000 UTC to 2026-01-10 15:23:13 +0000 UTC (now=2025-12-11 15:23:28.576904546 +0000 UTC))\\\\\\\"\\\\nI1211 15:23:28.576979 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1211 15:23:28.577056 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1211 15:23:28.577142 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1211 15:23:28.577177 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1211 15:23:28.577203 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1211 15:23:28.577207 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1211 15:23:28.577218 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2384301709/tls.crt::/tmp/serving-cert-2384301709/tls.key\\\\\\\"\\\\nF1211 15:23:28.577322 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T15:23:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T15:23:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.589833 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a218cff81e9c5bea2cfe7c7324625e108d28e658e45f470f8e4d539c7f7792de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.604410 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.618711 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.635694 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645703 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645758 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-log-socket\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645784 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-slash\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645805 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-etc-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645824 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-script-lib\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645846 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-env-overrides\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645868 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-bin\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645891 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-kubelet\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645913 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-systemd\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645942 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-ovn\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.645984 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-config\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646020 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-var-lib-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646042 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/485d782b-f4ea-4a0f-8e25-66b50577addf-ovn-node-metrics-cert\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646064 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-node-log\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646086 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-systemd-units\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646107 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-netns\") pod \"ovnkube-node-j6xw5\" (UID: 
\"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646131 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-ovn-kubernetes\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646152 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-netd\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646175 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646199 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhnk2\" (UniqueName: \"kubernetes.io/projected/485d782b-f4ea-4a0f-8e25-66b50577addf-kube-api-access-lhnk2\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646628 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646676 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-log-socket\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646709 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-slash\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.646740 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-etc-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647101 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-kubelet\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647156 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-netd\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647180 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-bin\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647149 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-var-lib-openvswitch\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647214 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-ovn\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647207 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-netns\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647242 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-ovn-kubernetes\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647220 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647242 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-node-log\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647289 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-systemd-units\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647215 4723 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-systemd\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647644 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-script-lib\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.647781 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-env-overrides\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.648515 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-config\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.652279 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/485d782b-f4ea-4a0f-8e25-66b50577addf-ovn-node-metrics-cert\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.656085 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"579cfad2-6e0a-4e52-b84d-0d17b6261f89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s6bbp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-j85c2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.667913 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhnk2\" (UniqueName: \"kubernetes.io/projected/485d782b-f4ea-4a0f-8e25-66b50577addf-kube-api-access-lhnk2\") pod \"ovnkube-node-j6xw5\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.684214 
4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"485d782b-f4ea-4a0f-8e25-66b50577addf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount
\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lhnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j6xw5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.718641 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpc9f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8923b3a7-6d56-4fb6-b496-b718ea3a2071\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bk97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpc9f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.750981 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2v4p2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2062e88-d88f-4e28-abee-8ca69fd16bba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2k6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2v4p2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.767661 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-hpc9f" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.778169 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b4ec1ca2909b77f08fb98be93136f1ac6037bf3ca5cfa42b5b19989c19d7cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5240911746f51523759712abf6f0e72a0d62056bdeff1ccf8a69f9c83c3b5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.815978 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.850439 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerStarted","Data":"e1a4f2fa0bcbcaa7a59d7565b8cd40e1a657ee8d9488ffe3ee8dc7bc3d69dce2"} Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.851417 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpc9f" event={"ID":"8923b3a7-6d56-4fb6-b496-b718ea3a2071","Type":"ContainerStarted","Data":"ce8d8c67278acc95d8e291a8b30444e27048abb229f33ffa7b81bda108ef3f84"} Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.853292 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.854946 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dabae3f47de894040d6fce7d39cafde130405d552e782c954b6bce9d85a30737"} Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.855826 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.857548 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.858897 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2v4p2" event={"ID":"e2062e88-d88f-4e28-abee-8ca69fd16bba","Type":"ContainerStarted","Data":"de79d2f6fa7485cf2a8e35e303d4baf848fb14f26e321451c2794314968992b2"} Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.860098 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"208118f0b66ea1a409807f429c41bd83bd4b00bf30c24dea5773457ecee65cfd"} Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.880576 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpc9f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8923b3a7-6d56-4fb6-b496-b718ea3a2071\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bk97\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpc9f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.894096 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2v4p2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2062e88-d88f-4e28-abee-8ca69fd16bba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:31Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2k6d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T15:23:31Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2v4p2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.909736 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.922614 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b4ec1ca2909b77f08fb98be93136f1ac6037bf3ca5cfa42b5b19989c19d7cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc5240911746f51523759712abf6f0e72a0d62056bdeff1ccf8a69f9c83c3b5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc kubenswrapper[4723]: I1211 15:23:31.937046 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a218cff81e9c5bea2cfe7c7324625e108d28e658e45f470f8e4d539c7f7792de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T15:23:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:31 crc 
kubenswrapper[4723]: I1211 15:23:31.950441 4723 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T15:23:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T15:23:31Z is after 2025-08-24T17:21:41Z" Dec 11 15:23:32 crc kubenswrapper[4723]: W1211 15:23:32.210150 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod485d782b_f4ea_4a0f_8e25_66b50577addf.slice/crio-4f4638fa33cfb3cdc5b4e5f635170d882bbc0f8e116b7bd486c29af5ae07e8b1 WatchSource:0}: Error finding container 4f4638fa33cfb3cdc5b4e5f635170d882bbc0f8e116b7bd486c29af5ae07e8b1: Status 404 returned error can't find the container with id 4f4638fa33cfb3cdc5b4e5f635170d882bbc0f8e116b7bd486c29af5ae07e8b1 Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.249696 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.249821 4723 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:36.249793441 +0000 UTC m=+27.024026866 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.249898 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.250035 4723 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.250104 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:36.250085369 +0000 UTC m=+27.024318804 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.270437 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=4.270414113 podStartE2EDuration="4.270414113s" podCreationTimestamp="2025-12-11 15:23:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:32.25272496 +0000 UTC m=+23.026958395" watchObservedRunningTime="2025-12-11 15:23:32.270414113 +0000 UTC m=+23.044647548" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.420224 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-xsqxj"] Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.420913 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.424757 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.424770 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.424836 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.425592 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.451631 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfbtw\" (UniqueName: \"kubernetes.io/projected/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-kube-api-access-hfbtw\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.452070 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.452216 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452301 4723 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452409 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:36.452379161 +0000 UTC m=+27.226612596 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452482 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452534 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452551 4723 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.452501 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-host\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452631 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:36.452606557 +0000 UTC m=+27.226839992 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452866 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452891 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452904 4723 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.452954 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:36.452940966 +0000 UTC m=+27.227174401 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.453005 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.453042 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-serviceca\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.547225 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.547234 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.547255 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.548818 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.549021 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.549135 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.553885 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfbtw\" (UniqueName: \"kubernetes.io/projected/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-kube-api-access-hfbtw\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.554011 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-host\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.554080 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-serviceca\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.554186 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-host\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.569470 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfbtw\" (UniqueName: \"kubernetes.io/projected/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-kube-api-access-hfbtw\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.697100 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c75bb560-f1b5-4bd5-b297-a7bb9c46a60f-serviceca\") pod \"node-ca-xsqxj\" (UID: \"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f\") " pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.697769 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.698714 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l"] Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.699189 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.699739 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-mwn6z"] Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.700422 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.700517 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.702451 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.702450 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.713645 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.715032 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.733144 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-xsqxj" Dec 11 15:23:32 crc kubenswrapper[4723]: W1211 15:23:32.747105 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc75bb560_f1b5_4bd5_b297_a7bb9c46a60f.slice/crio-0eefe8905065970c3ea5822b6fc0ca8307edb040fa2c1cbeaeea92be100962a7 WatchSource:0}: Error finding container 0eefe8905065970c3ea5822b6fc0ca8307edb040fa2c1cbeaeea92be100962a7: Status 404 returned error can't find the container with id 0eefe8905065970c3ea5822b6fc0ca8307edb040fa2c1cbeaeea92be100962a7 Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.755382 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/51ec5cb0-ab72-4a83-9ade-525d033e4972-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.755534 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzstp\" (UniqueName: \"kubernetes.io/projected/542875f8-72b1-4325-bc1d-95f3b2f53efc-kube-api-access-qzstp\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.755623 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.755727 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/51ec5cb0-ab72-4a83-9ade-525d033e4972-env-overrides\") pod 
\"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.755811 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnff7\" (UniqueName: \"kubernetes.io/projected/51ec5cb0-ab72-4a83-9ade-525d033e4972-kube-api-access-lnff7\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.755891 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/51ec5cb0-ab72-4a83-9ade-525d033e4972-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.761889 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=0.76186645 podStartE2EDuration="761.86645ms" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:32.761632784 +0000 UTC m=+23.535866239" watchObservedRunningTime="2025-12-11 15:23:32.76186645 +0000 UTC m=+23.536099885" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.856940 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/51ec5cb0-ab72-4a83-9ade-525d033e4972-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.857016 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzstp\" (UniqueName: \"kubernetes.io/projected/542875f8-72b1-4325-bc1d-95f3b2f53efc-kube-api-access-qzstp\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.857043 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.857075 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/51ec5cb0-ab72-4a83-9ade-525d033e4972-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.857099 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnff7\" (UniqueName: 
\"kubernetes.io/projected/51ec5cb0-ab72-4a83-9ade-525d033e4972-kube-api-access-lnff7\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.857131 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/51ec5cb0-ab72-4a83-9ade-525d033e4972-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.857699 4723 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: E1211 15:23:32.857889 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs podName:542875f8-72b1-4325-bc1d-95f3b2f53efc nodeName:}" failed. No retries permitted until 2025-12-11 15:23:33.357845768 +0000 UTC m=+24.132079223 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs") pod "network-metrics-daemon-mwn6z" (UID: "542875f8-72b1-4325-bc1d-95f3b2f53efc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.858376 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/51ec5cb0-ab72-4a83-9ade-525d033e4972-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.861866 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/51ec5cb0-ab72-4a83-9ade-525d033e4972-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.862247 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/51ec5cb0-ab72-4a83-9ade-525d033e4972-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.872947 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"4f4638fa33cfb3cdc5b4e5f635170d882bbc0f8e116b7bd486c29af5ae07e8b1"} Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.875397 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-xsqxj" event={"ID":"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f","Type":"ContainerStarted","Data":"0eefe8905065970c3ea5822b6fc0ca8307edb040fa2c1cbeaeea92be100962a7"} Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 
15:23:32.876883 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"eea59cca828e649f2ade7213257a4996233bb2461e56f1be372f343c931be07b"} Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.878059 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnff7\" (UniqueName: \"kubernetes.io/projected/51ec5cb0-ab72-4a83-9ade-525d033e4972-kube-api-access-lnff7\") pod \"ovnkube-control-plane-749d76644c-lsd4l\" (UID: \"51ec5cb0-ab72-4a83-9ade-525d033e4972\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.881195 4723 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.882761 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.882814 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.882828 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.882992 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzstp\" (UniqueName: \"kubernetes.io/projected/542875f8-72b1-4325-bc1d-95f3b2f53efc-kube-api-access-qzstp\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.883043 4723 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.893529 4723 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.894042 4723 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.895719 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.895755 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.895767 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.895790 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.895802 4723 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T15:23:32Z","lastTransitionTime":"2025-12-11T15:23:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.958073 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22"] Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.958623 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.963540 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.963728 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.963836 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 15:23:32 crc kubenswrapper[4723]: I1211 15:23:32.964187 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.014006 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" Dec 11 15:23:33 crc kubenswrapper[4723]: W1211 15:23:33.028131 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51ec5cb0_ab72_4a83_9ade_525d033e4972.slice/crio-d6f78b56fa167110dff6c8df6550980db859fa4902d47238da1c114c32ca4302 WatchSource:0}: Error finding container d6f78b56fa167110dff6c8df6550980db859fa4902d47238da1c114c32ca4302: Status 404 returned error can't find the container with id d6f78b56fa167110dff6c8df6550980db859fa4902d47238da1c114c32ca4302 Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.059656 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a6a36cd-a276-44aa-99f5-a970db58b978-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.059706 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a6a36cd-a276-44aa-99f5-a970db58b978-service-ca\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.059738 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a6a36cd-a276-44aa-99f5-a970db58b978-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.059760 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/8a6a36cd-a276-44aa-99f5-a970db58b978-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.059797 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a6a36cd-a276-44aa-99f5-a970db58b978-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.161742 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a6a36cd-a276-44aa-99f5-a970db58b978-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.161884 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a6a36cd-a276-44aa-99f5-a970db58b978-service-ca\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.161957 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a6a36cd-a276-44aa-99f5-a970db58b978-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.162020 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8a6a36cd-a276-44aa-99f5-a970db58b978-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.162079 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a6a36cd-a276-44aa-99f5-a970db58b978-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.162236 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8a6a36cd-a276-44aa-99f5-a970db58b978-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.161940 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/8a6a36cd-a276-44aa-99f5-a970db58b978-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.163015 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a6a36cd-a276-44aa-99f5-a970db58b978-service-ca\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.168022 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a6a36cd-a276-44aa-99f5-a970db58b978-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.184650 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a6a36cd-a276-44aa-99f5-a970db58b978-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-87x22\" (UID: \"8a6a36cd-a276-44aa-99f5-a970db58b978\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.276365 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" Dec 11 15:23:33 crc kubenswrapper[4723]: W1211 15:23:33.288005 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a6a36cd_a276_44aa_99f5_a970db58b978.slice/crio-f3e2b80c44427bfd3ae18833fc7ee59b4dd0bb3c6acfc5abb035e205d23282b5 WatchSource:0}: Error finding container f3e2b80c44427bfd3ae18833fc7ee59b4dd0bb3c6acfc5abb035e205d23282b5: Status 404 returned error can't find the container with id f3e2b80c44427bfd3ae18833fc7ee59b4dd0bb3c6acfc5abb035e205d23282b5 Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.364803 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:33 crc kubenswrapper[4723]: E1211 15:23:33.365011 4723 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:33 crc kubenswrapper[4723]: E1211 15:23:33.365079 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs podName:542875f8-72b1-4325-bc1d-95f3b2f53efc nodeName:}" failed. No retries permitted until 2025-12-11 15:23:34.365055156 +0000 UTC m=+25.139288601 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs") pod "network-metrics-daemon-mwn6z" (UID: "542875f8-72b1-4325-bc1d-95f3b2f53efc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.528540 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.534051 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.538066 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.883656 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"5d4abfa0aceb9964fdca2a7ec5a8a416c28d282bd67a846d6dbeb3ff1cb4dc20"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.885855 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"aae79fbbc1e8b2aa81812b40ce9691af413a6dac0004f6e6cc4cb068e3543a1b"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.888036 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c" exitCode=0 Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.888127 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.890015 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpc9f" event={"ID":"8923b3a7-6d56-4fb6-b496-b718ea3a2071","Type":"ContainerStarted","Data":"476a1dbd276dda0f93f3b0bd4cd8e1e91de8030b85868a4be8be434f7ea0941a"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.891434 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2v4p2" event={"ID":"e2062e88-d88f-4e28-abee-8ca69fd16bba","Type":"ContainerStarted","Data":"00ce639859a9b14dd3a59359e797245199c5a1b1ad15cde5cd747c2cb7061500"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.892548 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-xsqxj" event={"ID":"c75bb560-f1b5-4bd5-b297-a7bb9c46a60f","Type":"ContainerStarted","Data":"81f5e884b3f308969643b0600ca9b3aa5ec5b13a023fd128d947ede3b16101d4"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.894686 4723 generic.go:334] "Generic (PLEG): container finished" podID="579cfad2-6e0a-4e52-b84d-0d17b6261f89" containerID="e04798515bd53088db4240e64d333fc81bb0c90b5c1890a94cc0b980bfaba657" exitCode=0 Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.894739 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" 
event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerDied","Data":"e04798515bd53088db4240e64d333fc81bb0c90b5c1890a94cc0b980bfaba657"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.895891 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" event={"ID":"8a6a36cd-a276-44aa-99f5-a970db58b978","Type":"ContainerStarted","Data":"f3e2b80c44427bfd3ae18833fc7ee59b4dd0bb3c6acfc5abb035e205d23282b5"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.897052 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" event={"ID":"51ec5cb0-ab72-4a83-9ade-525d033e4972","Type":"ContainerStarted","Data":"111dfacf0388dc9d721e5ca69301b2ccc136ccd906c8eff1bb6180dc6484c1d2"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.897124 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" event={"ID":"51ec5cb0-ab72-4a83-9ade-525d033e4972","Type":"ContainerStarted","Data":"d6f78b56fa167110dff6c8df6550980db859fa4902d47238da1c114c32ca4302"} Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.903162 4723 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.903164 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=0.903141311 podStartE2EDuration="903.141311ms" podCreationTimestamp="2025-12-11 15:23:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:33.901647311 +0000 UTC m=+24.675880746" watchObservedRunningTime="2025-12-11 15:23:33.903141311 +0000 UTC m=+24.677374746" Dec 11 15:23:33 crc kubenswrapper[4723]: E1211 15:23:33.904729 4723 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.918090 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podStartSLOduration=3.91805713 podStartE2EDuration="3.91805713s" podCreationTimestamp="2025-12-11 15:23:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:33.917340051 +0000 UTC m=+24.691573496" watchObservedRunningTime="2025-12-11 15:23:33.91805713 +0000 UTC m=+24.692290575" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.942039 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-hpc9f" podStartSLOduration=2.942020991 podStartE2EDuration="2.942020991s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:33.940648184 +0000 UTC m=+24.714881619" watchObservedRunningTime="2025-12-11 15:23:33.942020991 +0000 UTC m=+24.716254426" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.969845 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-2v4p2" podStartSLOduration=3.969823525 
podStartE2EDuration="3.969823525s" podCreationTimestamp="2025-12-11 15:23:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:33.955208794 +0000 UTC m=+24.729442229" watchObservedRunningTime="2025-12-11 15:23:33.969823525 +0000 UTC m=+24.744056960" Dec 11 15:23:33 crc kubenswrapper[4723]: I1211 15:23:33.970332 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-xsqxj" podStartSLOduration=2.970325758 podStartE2EDuration="2.970325758s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:33.968383136 +0000 UTC m=+24.742616581" watchObservedRunningTime="2025-12-11 15:23:33.970325758 +0000 UTC m=+24.744559193" Dec 11 15:23:34 crc kubenswrapper[4723]: I1211 15:23:34.375151 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:34 crc kubenswrapper[4723]: E1211 15:23:34.375563 4723 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:34 crc kubenswrapper[4723]: E1211 15:23:34.375757 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs podName:542875f8-72b1-4325-bc1d-95f3b2f53efc nodeName:}" failed. No retries permitted until 2025-12-11 15:23:36.375715832 +0000 UTC m=+27.149949307 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs") pod "network-metrics-daemon-mwn6z" (UID: "542875f8-72b1-4325-bc1d-95f3b2f53efc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:34 crc kubenswrapper[4723]: I1211 15:23:34.547271 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:34 crc kubenswrapper[4723]: E1211 15:23:34.547505 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:34 crc kubenswrapper[4723]: I1211 15:23:34.547306 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:34 crc kubenswrapper[4723]: E1211 15:23:34.547677 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:34 crc kubenswrapper[4723]: I1211 15:23:34.547318 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:34 crc kubenswrapper[4723]: E1211 15:23:34.547824 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:34 crc kubenswrapper[4723]: I1211 15:23:34.547287 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:34 crc kubenswrapper[4723]: E1211 15:23:34.547956 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:35 crc kubenswrapper[4723]: I1211 15:23:35.907743 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerStarted","Data":"03d215feb9ab0f8b06f4747003977edf67fb33b7c9a38e207c0e491a3979bac2"} Dec 11 15:23:35 crc kubenswrapper[4723]: I1211 15:23:35.916499 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" event={"ID":"8a6a36cd-a276-44aa-99f5-a970db58b978","Type":"ContainerStarted","Data":"fe31ea84472e84ebd71442dcdb96e0eca9b1431ffad10589b197c5b70f1d6263"} Dec 11 15:23:35 crc kubenswrapper[4723]: I1211 15:23:35.920403 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" event={"ID":"51ec5cb0-ab72-4a83-9ade-525d033e4972","Type":"ContainerStarted","Data":"c0028e2f000e6a32d60c3a691d243548bbfff816047cad982640117827e0a5ed"} Dec 11 15:23:35 crc kubenswrapper[4723]: I1211 15:23:35.929677 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5"} Dec 11 15:23:35 crc kubenswrapper[4723]: I1211 15:23:35.929745 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a"} Dec 11 15:23:35 crc kubenswrapper[4723]: I1211 15:23:35.960337 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lsd4l" podStartSLOduration=3.960310154 podStartE2EDuration="3.960310154s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-11 15:23:35.960099808 +0000 UTC m=+26.734333243" watchObservedRunningTime="2025-12-11 15:23:35.960310154 +0000 UTC m=+26.734543589" Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.301262 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.301540 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:44.301494941 +0000 UTC m=+35.075728386 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.301676 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.301816 4723 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.301912 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:44.301888022 +0000 UTC m=+35.076121607 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.403499 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.403707 4723 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.403804 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs podName:542875f8-72b1-4325-bc1d-95f3b2f53efc nodeName:}" failed. No retries permitted until 2025-12-11 15:23:40.403780308 +0000 UTC m=+31.178013733 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs") pod "network-metrics-daemon-mwn6z" (UID: "542875f8-72b1-4325-bc1d-95f3b2f53efc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.407292 4723 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.504779 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.504846 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.504908 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505053 4723 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505100 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 
15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505124 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505153 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505167 4723 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505131 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505207 4723 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505168 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:44.505153159 +0000 UTC m=+35.279386584 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505240 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:44.505224001 +0000 UTC m=+35.279457496 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.505257 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:44.505248112 +0000 UTC m=+35.279481647 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.547916 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.547947 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.548041 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.548065 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.548182 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.548303 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.548288 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:36 crc kubenswrapper[4723]: E1211 15:23:36.548432 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.935467 4723 generic.go:334] "Generic (PLEG): container finished" podID="579cfad2-6e0a-4e52-b84d-0d17b6261f89" containerID="03d215feb9ab0f8b06f4747003977edf67fb33b7c9a38e207c0e491a3979bac2" exitCode=0 Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.935553 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerDied","Data":"03d215feb9ab0f8b06f4747003977edf67fb33b7c9a38e207c0e491a3979bac2"} Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.942888 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf"} Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.942938 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4"} Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.942951 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711"} Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.942963 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1"} Dec 11 15:23:36 crc kubenswrapper[4723]: I1211 15:23:36.964219 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-87x22" podStartSLOduration=6.964166809 podStartE2EDuration="6.964166809s" podCreationTimestamp="2025-12-11 15:23:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:35.977716789 +0000 UTC m=+26.751950234" watchObservedRunningTime="2025-12-11 15:23:36.964166809 +0000 UTC m=+27.738400234" Dec 11 15:23:37 crc kubenswrapper[4723]: I1211 15:23:37.950425 4723 generic.go:334] "Generic (PLEG): container finished" podID="579cfad2-6e0a-4e52-b84d-0d17b6261f89" containerID="5b2444ae74a5ecc21722e0d69b54d945b96450e4c93e55dd99e083838546568f" exitCode=0 Dec 11 15:23:37 crc kubenswrapper[4723]: I1211 15:23:37.950561 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerDied","Data":"5b2444ae74a5ecc21722e0d69b54d945b96450e4c93e55dd99e083838546568f"} Dec 11 15:23:38 crc kubenswrapper[4723]: I1211 15:23:38.548148 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:38 crc kubenswrapper[4723]: I1211 15:23:38.548296 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:38 crc kubenswrapper[4723]: I1211 15:23:38.548416 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:38 crc kubenswrapper[4723]: E1211 15:23:38.548409 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:38 crc kubenswrapper[4723]: I1211 15:23:38.548453 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:38 crc kubenswrapper[4723]: E1211 15:23:38.548564 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:38 crc kubenswrapper[4723]: E1211 15:23:38.548687 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:38 crc kubenswrapper[4723]: E1211 15:23:38.548793 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:38 crc kubenswrapper[4723]: I1211 15:23:38.958482 4723 generic.go:334] "Generic (PLEG): container finished" podID="579cfad2-6e0a-4e52-b84d-0d17b6261f89" containerID="def277e716d35d25028757520ee80c3bf9db0d7634a4fdd8ba2f8ee27e26ce52" exitCode=0 Dec 11 15:23:38 crc kubenswrapper[4723]: I1211 15:23:38.958561 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerDied","Data":"def277e716d35d25028757520ee80c3bf9db0d7634a4fdd8ba2f8ee27e26ce52"} Dec 11 15:23:39 crc kubenswrapper[4723]: I1211 15:23:39.967064 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b"} Dec 11 15:23:39 crc kubenswrapper[4723]: I1211 15:23:39.970951 4723 generic.go:334] "Generic (PLEG): container finished" podID="579cfad2-6e0a-4e52-b84d-0d17b6261f89" containerID="97c64c4c4f6a472419f55a22df4b59830b39396ea0b35b845d89bcf8c9d1f4ea" exitCode=0 Dec 11 15:23:39 crc kubenswrapper[4723]: I1211 15:23:39.970999 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerDied","Data":"97c64c4c4f6a472419f55a22df4b59830b39396ea0b35b845d89bcf8c9d1f4ea"} Dec 11 15:23:40 crc kubenswrapper[4723]: E1211 15:23:40.239910 4723 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod579cfad2_6e0a_4e52_b84d_0d17b6261f89.slice/crio-c0032902c7bf9509f0fbbe6884611571a143e24c7248dbfc9f5c9912474856af.scope\": RecentStats: unable to find data in memory cache]" Dec 11 15:23:40 crc kubenswrapper[4723]: I1211 15:23:40.452431 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:40 crc kubenswrapper[4723]: E1211 15:23:40.453130 4723 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:40 crc kubenswrapper[4723]: E1211 15:23:40.453358 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs podName:542875f8-72b1-4325-bc1d-95f3b2f53efc nodeName:}" failed. No retries permitted until 2025-12-11 15:23:48.453324579 +0000 UTC m=+39.227558004 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs") pod "network-metrics-daemon-mwn6z" (UID: "542875f8-72b1-4325-bc1d-95f3b2f53efc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 15:23:40 crc kubenswrapper[4723]: I1211 15:23:40.548131 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:40 crc kubenswrapper[4723]: I1211 15:23:40.548189 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:40 crc kubenswrapper[4723]: I1211 15:23:40.548243 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:40 crc kubenswrapper[4723]: E1211 15:23:40.548313 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:40 crc kubenswrapper[4723]: I1211 15:23:40.548131 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:40 crc kubenswrapper[4723]: E1211 15:23:40.548431 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:40 crc kubenswrapper[4723]: E1211 15:23:40.548545 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:40 crc kubenswrapper[4723]: E1211 15:23:40.548763 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:40 crc kubenswrapper[4723]: I1211 15:23:40.985780 4723 generic.go:334] "Generic (PLEG): container finished" podID="579cfad2-6e0a-4e52-b84d-0d17b6261f89" containerID="c0032902c7bf9509f0fbbe6884611571a143e24c7248dbfc9f5c9912474856af" exitCode=0 Dec 11 15:23:40 crc kubenswrapper[4723]: I1211 15:23:40.985845 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerDied","Data":"c0032902c7bf9509f0fbbe6884611571a143e24c7248dbfc9f5c9912474856af"} Dec 11 15:23:41 crc kubenswrapper[4723]: I1211 15:23:41.993564 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-j85c2" event={"ID":"579cfad2-6e0a-4e52-b84d-0d17b6261f89","Type":"ContainerStarted","Data":"d424ccadbeefaeac4410919e7690ef8430467b3fce18ddca790e7bb6ae1fac13"} Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.000019 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerStarted","Data":"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a"} Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.000373 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.000397 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.017099 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-j85c2" podStartSLOduration=11.017075871 podStartE2EDuration="11.017075871s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:42.016184837 +0000 UTC m=+32.790418332" watchObservedRunningTime="2025-12-11 15:23:42.017075871 +0000 UTC m=+32.791309306" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.033669 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.051253 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podStartSLOduration=11.051235084 podStartE2EDuration="11.051235084s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:42.050174666 +0000 UTC m=+32.824408111" watchObservedRunningTime="2025-12-11 15:23:42.051235084 +0000 UTC m=+32.825468519" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.547637 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.547674 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:42 crc kubenswrapper[4723]: E1211 15:23:42.548201 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.547699 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:42 crc kubenswrapper[4723]: I1211 15:23:42.547687 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:42 crc kubenswrapper[4723]: E1211 15:23:42.548288 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:42 crc kubenswrapper[4723]: E1211 15:23:42.548445 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:42 crc kubenswrapper[4723]: E1211 15:23:42.548507 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:43 crc kubenswrapper[4723]: I1211 15:23:43.004638 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:43 crc kubenswrapper[4723]: I1211 15:23:43.041192 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:23:43 crc kubenswrapper[4723]: I1211 15:23:43.839173 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-mwn6z"] Dec 11 15:23:43 crc kubenswrapper[4723]: I1211 15:23:43.839355 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:43 crc kubenswrapper[4723]: E1211 15:23:43.839675 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.399396 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.399520 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.399674 4723 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.399835 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.399805863 +0000 UTC m=+51.174039298 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.400284 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.4000831 +0000 UTC m=+51.174362856 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.547798 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.547867 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.547942 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.548100 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.548152 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.548481 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.602371 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.602445 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:44 crc kubenswrapper[4723]: I1211 15:23:44.602509 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.602652 4723 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.602769 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.602747482 +0000 UTC m=+51.376980937 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.602677 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.602822 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.602839 4723 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.602899 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.602881835 +0000 UTC m=+51.377115280 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.603091 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.603130 4723 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.603145 4723 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:44 crc kubenswrapper[4723]: E1211 15:23:44.603185 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.603173443 +0000 UTC m=+51.377406938 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 15:23:45 crc kubenswrapper[4723]: I1211 15:23:45.547452 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:45 crc kubenswrapper[4723]: E1211 15:23:45.547656 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-mwn6z" podUID="542875f8-72b1-4325-bc1d-95f3b2f53efc" Dec 11 15:23:46 crc kubenswrapper[4723]: I1211 15:23:46.547348 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:46 crc kubenswrapper[4723]: E1211 15:23:46.548141 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 15:23:46 crc kubenswrapper[4723]: I1211 15:23:46.548229 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:46 crc kubenswrapper[4723]: I1211 15:23:46.548259 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:46 crc kubenswrapper[4723]: E1211 15:23:46.549490 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 15:23:46 crc kubenswrapper[4723]: E1211 15:23:46.549617 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.201713 4723 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.202056 4723 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.247258 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.247778 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.249828 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-ndhbx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.250170 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.253031 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7htrb"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.253616 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.262581 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-sjsqg"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.263359 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.265357 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.266319 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: W1211 15:23:47.271462 4723 reflector.go:561] object-"openshift-console"/"default-dockercfg-chnjx": failed to list *v1.Secret: secrets "default-dockercfg-chnjx" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 11 15:23:47 crc kubenswrapper[4723]: E1211 15:23:47.271704 4723 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"default-dockercfg-chnjx\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-chnjx\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.272752 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.274474 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: W1211 15:23:47.275284 4723 reflector.go:561] object-"openshift-console"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 11 15:23:47 crc kubenswrapper[4723]: E1211 15:23:47.275356 4723 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.296587 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 15:23:47 crc kubenswrapper[4723]: W1211 15:23:47.296696 4723 reflector.go:561] object-"openshift-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 11 15:23:47 crc kubenswrapper[4723]: E1211 15:23:47.296793 4723 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.297151 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 15:23:47 crc kubenswrapper[4723]: W1211 15:23:47.297306 4723 reflector.go:561] object-"openshift-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 11 15:23:47 crc kubenswrapper[4723]: E1211 15:23:47.297441 4723 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.297939 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.298058 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.298116 4723 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.308574 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.309053 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.310658 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-dw6bx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.311342 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.311537 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.311736 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.311888 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.311982 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312031 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312127 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312174 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312218 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312291 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312324 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312334 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312433 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: W1211 15:23:47.312463 4723 reflector.go:561] object-"openshift-console"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console": no relationship found between node 'crc' and this object Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312490 4723 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: E1211 15:23:47.312507 4723 reflector.go:158] "Unhandled Error" err="object-\"openshift-console\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.311892 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312591 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312629 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312710 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312715 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312735 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312757 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312829 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312864 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.312950 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.313072 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.313408 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.313844 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.314206 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.314862 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fhg8f"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.315380 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.320800 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.321069 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.321674 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-5wvnh"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.322225 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-565wv"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.322543 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.323101 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.323396 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.325110 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.328601 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.329182 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.329710 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.335595 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-hw8r7"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.336283 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.336685 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.337035 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.337632 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.338076 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.338077 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339549 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339594 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-serving-cert\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339628 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-config\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339675 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-machine-approver-tls\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339710 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/810dc990-b95f-403e-ab68-5c65f34396bf-images\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339737 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-config\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339764 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 
crc kubenswrapper[4723]: I1211 15:23:47.339791 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfl7r\" (UniqueName: \"kubernetes.io/projected/810dc990-b95f-403e-ab68-5c65f34396bf-kube-api-access-cfl7r\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339820 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ec840a62-9898-474e-bdbf-f92b2f01174b-audit-dir\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339877 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/810dc990-b95f-403e-ab68-5c65f34396bf-config\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339912 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939fb1b2-4cc1-4c31-a3d1-eb748154b875-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339938 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.339990 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-audit-policies\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340020 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zckmz\" (UniqueName: \"kubernetes.io/projected/ec840a62-9898-474e-bdbf-f92b2f01174b-kube-api-access-zckmz\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340066 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh5mc\" (UniqueName: \"kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340115 
4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-auth-proxy-config\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340160 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-encryption-config\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340199 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/810dc990-b95f-403e-ab68-5c65f34396bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340228 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kghrk\" (UniqueName: \"kubernetes.io/projected/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-kube-api-access-kghrk\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340336 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bh5p\" (UniqueName: \"kubernetes.io/projected/3cabbadb-fc14-4253-a767-d153aa9604bc-kube-api-access-5bh5p\") pod \"downloads-7954f5f757-sjsqg\" (UID: \"3cabbadb-fc14-4253-a767-d153aa9604bc\") " pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340386 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-etcd-client\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340432 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/939fb1b2-4cc1-4c31-a3d1-eb748154b875-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340464 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdcw7\" (UniqueName: \"kubernetes.io/projected/939fb1b2-4cc1-4c31-a3d1-eb748154b875-kube-api-access-tdcw7\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 
15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340540 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340577 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-client-ca\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.340607 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.363994 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.364226 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.364373 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.364514 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.364526 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-mgx97"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.364676 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.375673 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.376077 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.376998 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.380410 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.380669 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.380940 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.381915 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.382356 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.383335 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.383794 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.383860 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.383924 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.384090 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.384126 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.384025 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.383792 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.384654 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.388013 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jrjdt"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.389235 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.389462 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.396455 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.397102 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.419031 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.421861 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.422118 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.423984 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.424242 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.424313 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.424323 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.424342 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.424381 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.424411 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.425008 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.425180 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.425587 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.426248 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.426291 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.427130 4723 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.428047 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.428273 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.428408 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.428670 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.428942 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.429106 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.429328 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.429434 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.429769 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.429871 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.430107 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.430136 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.430203 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.430145 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.428951 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.431458 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.431625 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432035 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432103 4723 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432196 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432336 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432431 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432580 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432656 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.432745 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.434745 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445443 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445538 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm4gm\" (UniqueName: \"kubernetes.io/projected/154afe62-f77d-4434-9250-6dc1a2a8b252-kube-api-access-rm4gm\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445577 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9953bf77-d4f2-4168-81b2-fdb772f44212-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445598 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445615 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwbz8\" (UniqueName: \"kubernetes.io/projected/9953bf77-d4f2-4168-81b2-fdb772f44212-kube-api-access-wwbz8\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: 
I1211 15:23:47.445642 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-serving-cert\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445663 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-config\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445687 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-config\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445714 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a948432a-a38e-4465-9b5d-7b841b06d81f-profile-collector-cert\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445733 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fb65e41-05fd-483d-a2ab-dfa663b0660c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445806 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9cw8\" (UniqueName: \"kubernetes.io/projected/1dae0577-a799-4ba7-9cc2-f6c38436bae4-kube-api-access-n9cw8\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445812 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445828 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-machine-approver-tls\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445847 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445870 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-config\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445887 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrl2m\" (UniqueName: \"kubernetes.io/projected/a948432a-a38e-4465-9b5d-7b841b06d81f-kube-api-access-xrl2m\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445908 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7734e148-f74c-4b24-ac4e-02d85b478850-proxy-tls\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445928 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-config\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445948 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/810dc990-b95f-403e-ab68-5c65f34396bf-images\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.445984 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.446004 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.446023 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-oauth-config\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: 
I1211 15:23:47.446049 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-config\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.446132 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.446659 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.446917 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fr92q"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.447007 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.447484 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448097 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/810dc990-b95f-403e-ab68-5c65f34396bf-images\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448566 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448775 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f4f87bd-6c70-4163-a655-957bcc992271-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448817 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb4p2\" (UniqueName: \"kubernetes.io/projected/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-kube-api-access-zb4p2\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448844 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-serving-cert\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448869 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9953bf77-d4f2-4168-81b2-fdb772f44212-metrics-tls\") pod \"ingress-operator-5b745b69d9-9jlk2\" 
(UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448928 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-stats-auth\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.448956 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-config\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.450059 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.450598 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-config\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.451353 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.453021 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454021 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454133 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-serving-cert\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454439 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454484 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfl7r\" (UniqueName: \"kubernetes.io/projected/810dc990-b95f-403e-ab68-5c65f34396bf-kube-api-access-cfl7r\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454519 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvp45\" (UniqueName: \"kubernetes.io/projected/7734e148-f74c-4b24-ac4e-02d85b478850-kube-api-access-zvp45\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454559 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fb65e41-05fd-483d-a2ab-dfa663b0660c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454589 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ec840a62-9898-474e-bdbf-f92b2f01174b-audit-dir\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454621 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65e96d17-3f27-42cb-a6cc-b911057378ab-service-ca-bundle\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454698 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-signing-key\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454739 4723 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-serving-cert\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454768 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7734e148-f74c-4b24-ac4e-02d85b478850-auth-proxy-config\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454796 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/810dc990-b95f-403e-ab68-5c65f34396bf-config\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454823 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/154afe62-f77d-4434-9250-6dc1a2a8b252-serving-cert\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454848 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b789a0fc-d92b-43f2-bb28-0e522ae80af8-node-pullsecrets\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454874 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnt6r\" (UniqueName: \"kubernetes.io/projected/fd2626c4-8f7b-43ab-93c2-f7f535a400b8-kube-api-access-nnt6r\") pod \"multus-admission-controller-857f4d67dd-jrjdt\" (UID: \"fd2626c4-8f7b-43ab-93c2-f7f535a400b8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454903 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g746l\" (UniqueName: \"kubernetes.io/projected/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-kube-api-access-g746l\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454929 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/fd2626c4-8f7b-43ab-93c2-f7f535a400b8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jrjdt\" (UID: \"fd2626c4-8f7b-43ab-93c2-f7f535a400b8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454953 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a89db749-53e1-4e74-b58b-2f8f4e990d68-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455006 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455039 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939fb1b2-4cc1-4c31-a3d1-eb748154b875-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455065 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4f87bd-6c70-4163-a655-957bcc992271-config\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455091 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45vwg\" (UniqueName: \"kubernetes.io/projected/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-kube-api-access-45vwg\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455115 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-oauth-serving-cert\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455139 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7734e148-f74c-4b24-ac4e-02d85b478850-images\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455167 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-service-ca\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455203 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-encryption-config\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455229 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a89db749-53e1-4e74-b58b-2f8f4e990d68-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455269 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-audit-policies\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455305 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-audit\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455344 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9f4f87bd-6c70-4163-a655-957bcc992271-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455377 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqx5r\" (UniqueName: \"kubernetes.io/projected/65e96d17-3f27-42cb-a6cc-b911057378ab-kube-api-access-lqx5r\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455403 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a89db749-53e1-4e74-b58b-2f8f4e990d68-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455432 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fb65e41-05fd-483d-a2ab-dfa663b0660c-config\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455465 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7593c9bb-a459-41a7-87ed-9715551fb659-serving-cert\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455493 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-service-ca-bundle\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455531 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zckmz\" (UniqueName: \"kubernetes.io/projected/ec840a62-9898-474e-bdbf-f92b2f01174b-kube-api-access-zckmz\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455559 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-metrics-certs\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455587 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-client-ca\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455632 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/154afe62-f77d-4434-9250-6dc1a2a8b252-available-featuregates\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455659 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-signing-cabundle\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455698 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh5mc\" (UniqueName: \"kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455724 4723 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-auth-proxy-config\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455755 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-encryption-config\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455762 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/810dc990-b95f-403e-ab68-5c65f34396bf-config\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.455782 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-default-certificate\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.454903 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/ec840a62-9898-474e-bdbf-f92b2f01174b-audit-dir\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.456504 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-audit-policies\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.456560 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/810dc990-b95f-403e-ab68-5c65f34396bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.456595 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-etcd-serving-ca\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.456606 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-auth-proxy-config\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.456626 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbchh\" (UniqueName: \"kubernetes.io/projected/a7b47ae1-3d79-42e6-b55a-4021723e74d5-kube-api-access-bbchh\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.456659 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9953bf77-d4f2-4168-81b2-fdb772f44212-trusted-ca\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.456705 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.473251 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-config\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.473871 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939fb1b2-4cc1-4c31-a3d1-eb748154b875-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.473930 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.474377 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-encryption-config\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.474890 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-machine-approver-tls\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477581 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kghrk\" (UniqueName: \"kubernetes.io/projected/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-kube-api-access-kghrk\") pod \"machine-approver-56656f9798-tklxz\" (UID: 
\"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477676 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-etcd-client\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477715 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6xpp\" (UniqueName: \"kubernetes.io/projected/b789a0fc-d92b-43f2-bb28-0e522ae80af8-kube-api-access-c6xpp\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477742 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-trusted-ca-bundle\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477845 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-etcd-client\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477880 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bh5p\" (UniqueName: \"kubernetes.io/projected/3cabbadb-fc14-4253-a767-d153aa9604bc-kube-api-access-5bh5p\") pod \"downloads-7954f5f757-sjsqg\" (UID: \"3cabbadb-fc14-4253-a767-d153aa9604bc\") " pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477912 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b789a0fc-d92b-43f2-bb28-0e522ae80af8-audit-dir\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.477944 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a948432a-a38e-4465-9b5d-7b841b06d81f-srv-cert\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478067 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-config\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478101 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-image-import-ca\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478141 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/939fb1b2-4cc1-4c31-a3d1-eb748154b875-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478168 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdcw7\" (UniqueName: \"kubernetes.io/projected/939fb1b2-4cc1-4c31-a3d1-eb748154b875-kube-api-access-tdcw7\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478203 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478239 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn9p2\" (UniqueName: \"kubernetes.io/projected/a89db749-53e1-4e74-b58b-2f8f4e990d68-kube-api-access-rn9p2\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478341 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-serving-cert\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478392 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478422 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9grwr\" (UniqueName: \"kubernetes.io/projected/7593c9bb-a459-41a7-87ed-9715551fb659-kube-api-access-9grwr\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478452 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88tf9\" (UniqueName: \"kubernetes.io/projected/97027ce1-2e5b-4186-90fc-089d66251247-kube-api-access-88tf9\") pod \"migrator-59844c95c7-qhkkp\" (UID: \"97027ce1-2e5b-4186-90fc-089d66251247\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478495 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-client-ca\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478529 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.480201 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.489731 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.490909 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.492223 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q5pwb"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.478439 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/810dc990-b95f-403e-ab68-5c65f34396bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.494317 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ec840a62-9898-474e-bdbf-f92b2f01174b-etcd-client\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.494445 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.494754 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.494991 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.495944 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.496236 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-client-ca\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.496402 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.521888 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.522182 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.522608 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.522799 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.523251 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec840a62-9898-474e-bdbf-f92b2f01174b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.524206 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.524728 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.524891 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/939fb1b2-4cc1-4c31-a3d1-eb748154b875-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.524992 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.526124 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.526135 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.528902 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.529188 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.530251 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.533633 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nxcpx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.534267 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.534691 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.536706 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-tdqkq"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.538553 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.538936 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w957v"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.539452 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.539902 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.540665 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.542808 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.543991 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.544518 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-sjsqg"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.546551 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-ndhbx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.548132 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.550235 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.556443 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7htrb"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.557566 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-565wv"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.557703 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.557716 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dw6bx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.558212 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.558291 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-s276d"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.559388 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.559664 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.559785 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.559589 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-s276d" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.562168 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.564192 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.565198 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.566709 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fhg8f"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.566980 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.567896 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.569799 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-5wvnh"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.570626 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-jqpj6"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.571459 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.571822 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-tbf9x"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.572260 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.572686 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q5pwb"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.573723 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-x4pzx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.575649 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.575705 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.575739 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.577373 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.578708 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579573 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88tf9\" (UniqueName: \"kubernetes.io/projected/97027ce1-2e5b-4186-90fc-089d66251247-kube-api-access-88tf9\") pod \"migrator-59844c95c7-qhkkp\" (UID: \"97027ce1-2e5b-4186-90fc-089d66251247\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579654 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd17f73a-62e0-4e24-924c-a449a813f55b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kq5tw\" (UID: \"dd17f73a-62e0-4e24-924c-a449a813f55b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579687 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579717 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-policies\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579743 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/7b651358-a4e6-40b5-a8db-f4108332e022-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kv42r\" (UID: \"7b651358-a4e6-40b5-a8db-f4108332e022\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579773 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm4gm\" (UniqueName: \"kubernetes.io/projected/154afe62-f77d-4434-9250-6dc1a2a8b252-kube-api-access-rm4gm\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579793 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579814 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579835 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579857 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579901 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwbz8\" (UniqueName: \"kubernetes.io/projected/9953bf77-d4f2-4168-81b2-fdb772f44212-kube-api-access-wwbz8\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579934 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de1f58e2-b8f5-468e-80f7-6476eefc67f0-config\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.579985 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7jjr\" (UniqueName: \"kubernetes.io/projected/de1f58e2-b8f5-468e-80f7-6476eefc67f0-kube-api-access-x7jjr\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580008 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-client\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580042 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-config\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580061 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fb65e41-05fd-483d-a2ab-dfa663b0660c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580082 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9cw8\" (UniqueName: \"kubernetes.io/projected/1dae0577-a799-4ba7-9cc2-f6c38436bae4-kube-api-access-n9cw8\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580191 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580215 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcsr8\" (UniqueName: \"kubernetes.io/projected/816fbc57-2230-443a-979a-42cecc91e498-kube-api-access-fcsr8\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580237 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-config\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580258 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-config\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580285 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvp45\" (UniqueName: \"kubernetes.io/projected/7734e148-f74c-4b24-ac4e-02d85b478850-kube-api-access-zvp45\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580302 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fb65e41-05fd-483d-a2ab-dfa663b0660c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580322 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65e96d17-3f27-42cb-a6cc-b911057378ab-service-ca-bundle\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580343 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580374 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580395 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7734e148-f74c-4b24-ac4e-02d85b478850-auth-proxy-config\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580418 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580457 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/b789a0fc-d92b-43f2-bb28-0e522ae80af8-node-pullsecrets\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580477 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7kxk\" (UniqueName: \"kubernetes.io/projected/0008032b-faa4-4c7a-87ea-5ede94bc0229-kube-api-access-h7kxk\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580504 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4f87bd-6c70-4163-a655-957bcc992271-config\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580524 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45vwg\" (UniqueName: \"kubernetes.io/projected/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-kube-api-access-45vwg\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580542 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/fd2626c4-8f7b-43ab-93c2-f7f535a400b8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jrjdt\" (UID: \"fd2626c4-8f7b-43ab-93c2-f7f535a400b8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580558 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-oauth-serving-cert\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580577 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-service-ca\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580597 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/166ce132-af47-4b71-930e-bd80549c3d3f-proxy-tls\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580617 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-config\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.580639 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a89db749-53e1-4e74-b58b-2f8f4e990d68-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.582349 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-audit\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.584677 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.585476 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9f4f87bd-6c70-4163-a655-957bcc992271-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.585558 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a89db749-53e1-4e74-b58b-2f8f4e990d68-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.585603 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fb65e41-05fd-483d-a2ab-dfa663b0660c-config\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.588388 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-dir\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.588446 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: 
\"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.587321 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-config\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.588252 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7734e148-f74c-4b24-ac4e-02d85b478850-auth-proxy-config\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.588537 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jrjdt"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.588554 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-config\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.586403 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-oauth-serving-cert\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.587139 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f4f87bd-6c70-4163-a655-957bcc992271-config\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.589263 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-metrics-certs\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.589405 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7593c9bb-a459-41a7-87ed-9715551fb659-serving-cert\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.589505 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-service-ca-bundle\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.589631 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cba98b6e-0115-4c3e-801e-23f24e4720d9-serving-cert\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.589675 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.590587 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-audit\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.591173 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65e96d17-3f27-42cb-a6cc-b911057378ab-service-ca-bundle\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.591400 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.590756 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-config\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.591609 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.592939 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2fb65e41-05fd-483d-a2ab-dfa663b0660c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.589633 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b789a0fc-d92b-43f2-bb28-0e522ae80af8-node-pullsecrets\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " 
pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.593114 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2fb65e41-05fd-483d-a2ab-dfa663b0660c-config\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.595602 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-metrics-certs\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.595694 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-mgx97"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.595723 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-service-ca-bundle\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.596476 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-etcd-client\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.596636 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6xpp\" (UniqueName: \"kubernetes.io/projected/b789a0fc-d92b-43f2-bb28-0e522ae80af8-kube-api-access-c6xpp\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.596760 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9953bf77-d4f2-4168-81b2-fdb772f44212-trusted-ca\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.597219 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/a89db749-53e1-4e74-b58b-2f8f4e990d68-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.597313 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b789a0fc-d92b-43f2-bb28-0e522ae80af8-audit-dir\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: 
I1211 15:23:47.598161 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a948432a-a38e-4465-9b5d-7b841b06d81f-srv-cert\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.598263 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-config\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.598263 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b789a0fc-d92b-43f2-bb28-0e522ae80af8-audit-dir\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.598551 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9grwr\" (UniqueName: \"kubernetes.io/projected/7593c9bb-a459-41a7-87ed-9715551fb659-kube-api-access-9grwr\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.598747 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9953bf77-d4f2-4168-81b2-fdb772f44212-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.598816 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-config\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599197 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599640 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-etcd-client\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599701 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-config\") pod 
\"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599764 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a948432a-a38e-4465-9b5d-7b841b06d81f-profile-collector-cert\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599805 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599876 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599931 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.599989 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600043 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrl2m\" (UniqueName: \"kubernetes.io/projected/a948432a-a38e-4465-9b5d-7b841b06d81f-kube-api-access-xrl2m\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600092 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7734e148-f74c-4b24-ac4e-02d85b478850-proxy-tls\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600130 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-oauth-config\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600168 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/816fbc57-2230-443a-979a-42cecc91e498-srv-cert\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600195 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f4f87bd-6c70-4163-a655-957bcc992271-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600226 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb4p2\" (UniqueName: \"kubernetes.io/projected/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-kube-api-access-zb4p2\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600634 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-serving-cert\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600678 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9953bf77-d4f2-4168-81b2-fdb772f44212-metrics-tls\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.600982 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-stats-auth\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.601126 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-config\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.601410 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l5nx\" (UniqueName: \"kubernetes.io/projected/166ce132-af47-4b71-930e-bd80549c3d3f-kube-api-access-6l5nx\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.601664 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-signing-key\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.601775 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-config\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.601893 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7593c9bb-a459-41a7-87ed-9715551fb659-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.601927 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-serving-cert\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602002 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/154afe62-f77d-4434-9250-6dc1a2a8b252-serving-cert\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602047 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnt6r\" (UniqueName: \"kubernetes.io/projected/fd2626c4-8f7b-43ab-93c2-f7f535a400b8-kube-api-access-nnt6r\") pod \"multus-admission-controller-857f4d67dd-jrjdt\" (UID: \"fd2626c4-8f7b-43ab-93c2-f7f535a400b8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602097 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g746l\" (UniqueName: \"kubernetes.io/projected/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-kube-api-access-g746l\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602140 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a89db749-53e1-4e74-b58b-2f8f4e990d68-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602178 4723 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfk5w\" (UniqueName: \"kubernetes.io/projected/cba98b6e-0115-4c3e-801e-23f24e4720d9-kube-api-access-rfk5w\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602221 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7734e148-f74c-4b24-ac4e-02d85b478850-images\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602272 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-ca\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602314 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602360 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-567jr\" (UniqueName: \"kubernetes.io/projected/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-kube-api-access-567jr\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602411 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-encryption-config\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602449 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-service-ca\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602485 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de1f58e2-b8f5-468e-80f7-6476eefc67f0-serving-cert\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602530 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602574 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grmrc\" (UniqueName: \"kubernetes.io/projected/7b651358-a4e6-40b5-a8db-f4108332e022-kube-api-access-grmrc\") pod \"control-plane-machine-set-operator-78cbb6b69f-kv42r\" (UID: \"7b651358-a4e6-40b5-a8db-f4108332e022\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602641 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gjzk\" (UniqueName: \"kubernetes.io/projected/dd17f73a-62e0-4e24-924c-a449a813f55b-kube-api-access-7gjzk\") pod \"package-server-manager-789f6589d5-kq5tw\" (UID: \"dd17f73a-62e0-4e24-924c-a449a813f55b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602714 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602774 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqx5r\" (UniqueName: \"kubernetes.io/projected/65e96d17-3f27-42cb-a6cc-b911057378ab-kube-api-access-lqx5r\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602783 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a948432a-a38e-4465-9b5d-7b841b06d81f-srv-cert\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602823 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/166ce132-af47-4b71-930e-bd80549c3d3f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602881 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-client-ca\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602937 4723 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/154afe62-f77d-4434-9250-6dc1a2a8b252-available-featuregates\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.602985 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-signing-cabundle\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603022 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-default-certificate\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603060 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-etcd-serving-ca\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603121 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbchh\" (UniqueName: \"kubernetes.io/projected/a7b47ae1-3d79-42e6-b55a-4021723e74d5-kube-api-access-bbchh\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603153 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-trusted-ca-bundle\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603188 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/816fbc57-2230-443a-979a-42cecc91e498-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603259 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-image-import-ca\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603288 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-5wvnh\" (UID: 
\"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603320 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn9p2\" (UniqueName: \"kubernetes.io/projected/a89db749-53e1-4e74-b58b-2f8f4e990d68-kube-api-access-rn9p2\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.603354 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-serving-cert\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.604669 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-serving-cert\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.605064 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-service-ca\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.605549 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/154afe62-f77d-4434-9250-6dc1a2a8b252-available-featuregates\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.605719 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-stats-auth\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.606131 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f4f87bd-6c70-4163-a655-957bcc992271-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.606180 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.606382 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-etcd-serving-ca\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.606673 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b789a0fc-d92b-43f2-bb28-0e522ae80af8-encryption-config\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.606892 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a89db749-53e1-4e74-b58b-2f8f4e990d68-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.607195 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.607374 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a948432a-a38e-4465-9b5d-7b841b06d81f-profile-collector-cert\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.608188 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7593c9bb-a459-41a7-87ed-9715551fb659-serving-cert\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.608506 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-image-import-ca\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.608607 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-serving-cert\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.608710 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-serving-cert\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.608706 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/7734e148-f74c-4b24-ac4e-02d85b478850-proxy-tls\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.608923 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1dae0577-a799-4ba7-9cc2-f6c38436bae4-trusted-ca-bundle\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.608926 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/65e96d17-3f27-42cb-a6cc-b911057378ab-default-certificate\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.609697 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7734e148-f74c-4b24-ac4e-02d85b478850-images\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.609857 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/154afe62-f77d-4434-9250-6dc1a2a8b252-serving-cert\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.612325 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b789a0fc-d92b-43f2-bb28-0e522ae80af8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.612438 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.612575 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-signing-cabundle\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.612724 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-signing-key\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.612754 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1dae0577-a799-4ba7-9cc2-f6c38436bae4-console-oauth-config\") pod \"console-f9d7485db-dw6bx\" (UID: 
\"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.612852 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.613250 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-client-ca\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.614587 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fr92q"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.615692 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.616744 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.617938 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.619037 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.620150 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.620567 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/fd2626c4-8f7b-43ab-93c2-f7f535a400b8-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jrjdt\" (UID: \"fd2626c4-8f7b-43ab-93c2-f7f535a400b8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.622490 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qm4lg"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.624155 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-x4pzx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.624289 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.624820 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-s276d"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.626456 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.627388 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nxcpx"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.629940 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w957v"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.630466 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.632551 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.634365 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qm4lg"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.635488 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-tdqkq"] Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.650459 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.669907 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.691354 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704371 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/816fbc57-2230-443a-979a-42cecc91e498-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704413 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-trusted-ca\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704461 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd17f73a-62e0-4e24-924c-a449a813f55b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kq5tw\" (UID: \"dd17f73a-62e0-4e24-924c-a449a813f55b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704487 4723 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704509 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-policies\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704533 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b651358-a4e6-40b5-a8db-f4108332e022-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kv42r\" (UID: \"7b651358-a4e6-40b5-a8db-f4108332e022\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704628 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704650 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704713 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704827 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de1f58e2-b8f5-468e-80f7-6476eefc67f0-config\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.704851 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7jjr\" (UniqueName: \"kubernetes.io/projected/de1f58e2-b8f5-468e-80f7-6476eefc67f0-kube-api-access-x7jjr\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 
15:23:47.704890 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-client\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705076 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9953bf77-d4f2-4168-81b2-fdb772f44212-metrics-tls\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705184 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcsr8\" (UniqueName: \"kubernetes.io/projected/816fbc57-2230-443a-979a-42cecc91e498-kube-api-access-fcsr8\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705305 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705340 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705377 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-config\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705413 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705445 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7kxk\" (UniqueName: \"kubernetes.io/projected/0008032b-faa4-4c7a-87ea-5ede94bc0229-kube-api-access-h7kxk\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705484 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-service-ca\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705519 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/166ce132-af47-4b71-930e-bd80549c3d3f-proxy-tls\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705547 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705609 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-dir\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705651 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705718 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cba98b6e-0115-4c3e-801e-23f24e4720d9-serving-cert\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705727 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-dir\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705846 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.705926 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4aef2280-346e-448c-90a2-bdb01a0e9b2d-serving-cert\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") 
" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706011 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-config\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706037 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706062 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706083 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706114 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/816fbc57-2230-443a-979a-42cecc91e498-srv-cert\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706157 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l5nx\" (UniqueName: \"kubernetes.io/projected/166ce132-af47-4b71-930e-bd80549c3d3f-kube-api-access-6l5nx\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706248 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfk5w\" (UniqueName: \"kubernetes.io/projected/cba98b6e-0115-4c3e-801e-23f24e4720d9-kube-api-access-rfk5w\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706341 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-ca\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706380 4723 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706409 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-567jr\" (UniqueName: \"kubernetes.io/projected/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-kube-api-access-567jr\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706441 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grmrc\" (UniqueName: \"kubernetes.io/projected/7b651358-a4e6-40b5-a8db-f4108332e022-kube-api-access-grmrc\") pod \"control-plane-machine-set-operator-78cbb6b69f-kv42r\" (UID: \"7b651358-a4e6-40b5-a8db-f4108332e022\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706474 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de1f58e2-b8f5-468e-80f7-6476eefc67f0-serving-cert\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706500 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706532 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gjzk\" (UniqueName: \"kubernetes.io/projected/dd17f73a-62e0-4e24-924c-a449a813f55b-kube-api-access-7gjzk\") pod \"package-server-manager-789f6589d5-kq5tw\" (UID: \"dd17f73a-62e0-4e24-924c-a449a813f55b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706562 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kxfd\" (UniqueName: \"kubernetes.io/projected/4aef2280-346e-448c-90a2-bdb01a0e9b2d-kube-api-access-9kxfd\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.706592 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc 
kubenswrapper[4723]: I1211 15:23:47.706684 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/166ce132-af47-4b71-930e-bd80549c3d3f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.707577 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/166ce132-af47-4b71-930e-bd80549c3d3f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.708277 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/816fbc57-2230-443a-979a-42cecc91e498-profile-collector-cert\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.721048 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.728449 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9953bf77-d4f2-4168-81b2-fdb772f44212-trusted-ca\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.730740 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.751564 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.760372 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/816fbc57-2230-443a-979a-42cecc91e498-srv-cert\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.770204 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.791108 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.802069 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.818127 4723 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-config\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.818265 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4aef2280-346e-448c-90a2-bdb01a0e9b2d-serving-cert\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.818427 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kxfd\" (UniqueName: \"kubernetes.io/projected/4aef2280-346e-448c-90a2-bdb01a0e9b2d-kube-api-access-9kxfd\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.818481 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-trusted-ca\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.848997 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.850269 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.851744 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.857690 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.859983 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.860096 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.870164 
4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.879629 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.891430 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.909836 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.920169 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.931775 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.951017 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.970378 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 15:23:47 crc kubenswrapper[4723]: I1211 15:23:47.992071 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.011559 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.039682 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.051096 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.071465 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.091292 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.100342 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de1f58e2-b8f5-468e-80f7-6476eefc67f0-serving-cert\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.110594 4723 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.131801 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.136845 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/de1f58e2-b8f5-468e-80f7-6476eefc67f0-config\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.150483 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.159221 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/166ce132-af47-4b71-930e-bd80549c3d3f-proxy-tls\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.194130 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.195896 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfl7r\" (UniqueName: \"kubernetes.io/projected/810dc990-b95f-403e-ab68-5c65f34396bf-kube-api-access-cfl7r\") pod \"machine-api-operator-5694c8668f-ndhbx\" (UID: \"810dc990-b95f-403e-ab68-5c65f34396bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.221447 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.226745 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zckmz\" (UniqueName: \"kubernetes.io/projected/ec840a62-9898-474e-bdbf-f92b2f01174b-kube-api-access-zckmz\") pod \"apiserver-7bbb656c7d-nf5xs\" (UID: \"ec840a62-9898-474e-bdbf-f92b2f01174b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.265188 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kghrk\" (UniqueName: \"kubernetes.io/projected/ca6421f1-1c3d-4fbb-bf31-10f7f45f127b-kube-api-access-kghrk\") pod \"machine-approver-56656f9798-tklxz\" (UID: \"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.310678 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.315142 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdcw7\" (UniqueName: \"kubernetes.io/projected/939fb1b2-4cc1-4c31-a3d1-eb748154b875-kube-api-access-tdcw7\") pod \"openshift-controller-manager-operator-756b6f6bc6-xtfgh\" (UID: \"939fb1b2-4cc1-4c31-a3d1-eb748154b875\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.317525 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/7b651358-a4e6-40b5-a8db-f4108332e022-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kv42r\" (UID: \"7b651358-a4e6-40b5-a8db-f4108332e022\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.327085 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.330883 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 15:23:48 crc kubenswrapper[4723]: W1211 15:23:48.340124 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca6421f1_1c3d_4fbb_bf31_10f7f45f127b.slice/crio-58ca075500ce9de4b038d4dab677ccbad12e9450052950cb1f6985768acc2b77 WatchSource:0}: Error finding container 58ca075500ce9de4b038d4dab677ccbad12e9450052950cb1f6985768acc2b77: Status 404 returned error can't find the container with id 58ca075500ce9de4b038d4dab677ccbad12e9450052950cb1f6985768acc2b77 Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.350130 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.371567 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.380398 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd17f73a-62e0-4e24-924c-a449a813f55b-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-kq5tw\" (UID: \"dd17f73a-62e0-4e24-924c-a449a813f55b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.391649 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.411470 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.430581 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.436602 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-service-ca\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.451198 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 15:23:48 crc kubenswrapper[4723]: E1211 15:23:48.455684 4723 secret.go:188] Couldn't get secret openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 11 15:23:48 crc kubenswrapper[4723]: E1211 15:23:48.455757 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert podName:85180921-c4ba-4b06-9240-4d35a5c57248 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:48.955737614 +0000 UTC m=+39.729971049 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert") pod "controller-manager-879f6c89f-7htrb" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248") : failed to sync secret cache: timed out waiting for the condition Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.471213 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.479261 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.490460 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.496657 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.509798 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.510118 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.528220 4723 request.go:700] Waited for 1.004808269s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-etcd-operator/secrets?fieldSelector=metadata.name%3Detcd-client&limit=500&resourceVersion=0 Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.530702 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.531362 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.540225 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-client\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.547206 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.547294 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.547201 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.551693 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.571100 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.590809 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.596702 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-config\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.610939 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.617655 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/cba98b6e-0115-4c3e-801e-23f24e4720d9-etcd-ca\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.631305 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.640149 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cba98b6e-0115-4c3e-801e-23f24e4720d9-serving-cert\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.649902 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.671005 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.690932 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.700202 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:48 crc kubenswrapper[4723]: E1211 15:23:48.706379 4723 configmap.go:193] Couldn't get configMap openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:48 crc kubenswrapper[4723]: E1211 15:23:48.707236 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-config podName:4bd0ccab-0a40-492f-8f39-b451b8c36c1c nodeName:}" failed. No retries permitted until 2025-12-11 15:23:49.207016557 +0000 UTC m=+39.981250002 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-config") pod "openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" (UID: "4bd0ccab-0a40-492f-8f39-b451b8c36c1c") : failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.710327 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.730435 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.751353 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.771003 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.799617 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.811087 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 11 15:23:48 crc kubenswrapper[4723]: E1211 15:23:48.819673 4723 configmap.go:193] Couldn't get configMap openshift-console-operator/console-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:48 crc kubenswrapper[4723]: E1211 15:23:48.819775 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-config podName:4aef2280-346e-448c-90a2-bdb01a0e9b2d nodeName:}" failed. No retries permitted until 2025-12-11 15:23:49.319756333 +0000 UTC m=+40.093989768 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-config") pod "console-operator-58897d9998-nxcpx" (UID: "4aef2280-346e-448c-90a2-bdb01a0e9b2d") : failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.824789 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.827543 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-trusted-ca\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.829223 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.828913 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.829191 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.829418 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-policies\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.830095 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4aef2280-346e-448c-90a2-bdb01a0e9b2d-serving-cert\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.832610 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.842756 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.851197 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.870771 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.895369 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.911857 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.935463 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.951165 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.971519 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.988542 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:23:48 crc kubenswrapper[4723]: I1211 15:23:48.991650 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.010397 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.030489 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs"] Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.033809 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.040057 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.041674 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" event={"ID":"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b","Type":"ContainerStarted","Data":"9f060eb9b91a1e295ade63ae036d2c0ef0ad25628e59b70e01e1b96cbf62c956"} Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.041756 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" 
event={"ID":"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b","Type":"ContainerStarted","Data":"58ca075500ce9de4b038d4dab677ccbad12e9450052950cb1f6985768acc2b77"} Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.050329 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh"] Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.050628 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 15:23:49 crc kubenswrapper[4723]: W1211 15:23:49.065749 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod939fb1b2_4cc1_4c31_a3d1_eb748154b875.slice/crio-a3183b875307a11386f452c4ee8053114a3581d6bbc86aff0e680ffa540697dd WatchSource:0}: Error finding container a3183b875307a11386f452c4ee8053114a3581d6bbc86aff0e680ffa540697dd: Status 404 returned error can't find the container with id a3183b875307a11386f452c4ee8053114a3581d6bbc86aff0e680ffa540697dd Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.071105 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.075606 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-ndhbx"] Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.091589 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 15:23:49 crc kubenswrapper[4723]: W1211 15:23:49.095326 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod810dc990_b95f_403e_ab68_5c65f34396bf.slice/crio-be6b76f371d29e06102b2dd41dd996a683ab1c182af4f2f83cef88d35549726f WatchSource:0}: Error finding container be6b76f371d29e06102b2dd41dd996a683ab1c182af4f2f83cef88d35549726f: Status 404 returned error can't find the container with id be6b76f371d29e06102b2dd41dd996a683ab1c182af4f2f83cef88d35549726f Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.109831 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.130889 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.151058 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.171035 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.180443 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/542875f8-72b1-4325-bc1d-95f3b2f53efc-metrics-certs\") pod \"network-metrics-daemon-mwn6z\" (UID: \"542875f8-72b1-4325-bc1d-95f3b2f53efc\") " pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.190000 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 15:23:49 crc 
kubenswrapper[4723]: I1211 15:23:49.212026 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.230930 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.243079 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:49 crc kubenswrapper[4723]: E1211 15:23:49.243145 4723 projected.go:288] Couldn't get configMap openshift-controller-manager/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:49 crc kubenswrapper[4723]: E1211 15:23:49.243177 4723 projected.go:194] Error preparing data for projected volume kube-api-access-wh5mc for pod openshift-controller-manager/controller-manager-879f6c89f-7htrb: failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:49 crc kubenswrapper[4723]: E1211 15:23:49.243268 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc podName:85180921-c4ba-4b06-9240-4d35a5c57248 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:49.743241612 +0000 UTC m=+40.517475047 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-wh5mc" (UniqueName: "kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc") pod "controller-manager-879f6c89f-7htrb" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248") : failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.244254 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.250091 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.270893 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 15:23:49 crc kubenswrapper[4723]: E1211 15:23:49.283696 4723 projected.go:288] Couldn't get configMap openshift-console/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.283899 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-mwn6z" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.291239 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.311925 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.330996 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.345268 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-config\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.346280 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4aef2280-346e-448c-90a2-bdb01a0e9b2d-config\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.350934 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.370410 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.390164 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.432285 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88tf9\" (UniqueName: \"kubernetes.io/projected/97027ce1-2e5b-4186-90fc-089d66251247-kube-api-access-88tf9\") pod \"migrator-59844c95c7-qhkkp\" (UID: \"97027ce1-2e5b-4186-90fc-089d66251247\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.449666 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwbz8\" (UniqueName: \"kubernetes.io/projected/9953bf77-d4f2-4168-81b2-fdb772f44212-kube-api-access-wwbz8\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.459907 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-mwn6z"] Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.466450 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45vwg\" (UniqueName: \"kubernetes.io/projected/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-kube-api-access-45vwg\") pod \"route-controller-manager-6576b87f9c-p7nsg\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:49 crc kubenswrapper[4723]: W1211 15:23:49.475639 4723 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod542875f8_72b1_4325_bc1d_95f3b2f53efc.slice/crio-150ed9042682c80c8b62c4aad01bb7ad96c89ca34bc3c67b68ae41d1c07a5509 WatchSource:0}: Error finding container 150ed9042682c80c8b62c4aad01bb7ad96c89ca34bc3c67b68ae41d1c07a5509: Status 404 returned error can't find the container with id 150ed9042682c80c8b62c4aad01bb7ad96c89ca34bc3c67b68ae41d1c07a5509 Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.491128 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvp45\" (UniqueName: \"kubernetes.io/projected/7734e148-f74c-4b24-ac4e-02d85b478850-kube-api-access-zvp45\") pod \"machine-config-operator-74547568cd-h2b5l\" (UID: \"7734e148-f74c-4b24-ac4e-02d85b478850\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.512148 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm4gm\" (UniqueName: \"kubernetes.io/projected/154afe62-f77d-4434-9250-6dc1a2a8b252-kube-api-access-rm4gm\") pod \"openshift-config-operator-7777fb866f-n8zjh\" (UID: \"154afe62-f77d-4434-9250-6dc1a2a8b252\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.528661 4723 request.go:700] Waited for 1.939107253s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/serviceaccounts/console/token Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.529950 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2fb65e41-05fd-483d-a2ab-dfa663b0660c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jts4h\" (UID: \"2fb65e41-05fd-483d-a2ab-dfa663b0660c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.580320 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a89db749-53e1-4e74-b58b-2f8f4e990d68-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.595648 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9f4f87bd-6c70-4163-a655-957bcc992271-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9nrqn\" (UID: \"9f4f87bd-6c70-4163-a655-957bcc992271\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.609305 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6xpp\" (UniqueName: \"kubernetes.io/projected/b789a0fc-d92b-43f2-bb28-0e522ae80af8-kube-api-access-c6xpp\") pod \"apiserver-76f77b778f-5wvnh\" (UID: \"b789a0fc-d92b-43f2-bb28-0e522ae80af8\") " pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.625479 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9grwr\" (UniqueName: 
\"kubernetes.io/projected/7593c9bb-a459-41a7-87ed-9715551fb659-kube-api-access-9grwr\") pod \"authentication-operator-69f744f599-fhg8f\" (UID: \"7593c9bb-a459-41a7-87ed-9715551fb659\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.637529 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.647137 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9953bf77-d4f2-4168-81b2-fdb772f44212-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9jlk2\" (UID: \"9953bf77-d4f2-4168-81b2-fdb772f44212\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.667323 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb4p2\" (UniqueName: \"kubernetes.io/projected/bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65-kube-api-access-zb4p2\") pod \"openshift-apiserver-operator-796bbdcf4f-bnb9x\" (UID: \"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.667786 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.688064 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrl2m\" (UniqueName: \"kubernetes.io/projected/a948432a-a38e-4465-9b5d-7b841b06d81f-kube-api-access-xrl2m\") pod \"catalog-operator-68c6474976-5gjkv\" (UID: \"a948432a-a38e-4465-9b5d-7b841b06d81f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.698525 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.703811 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.706900 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnt6r\" (UniqueName: \"kubernetes.io/projected/fd2626c4-8f7b-43ab-93c2-f7f535a400b8-kube-api-access-nnt6r\") pod \"multus-admission-controller-857f4d67dd-jrjdt\" (UID: \"fd2626c4-8f7b-43ab-93c2-f7f535a400b8\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.716868 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.727720 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g746l\" (UniqueName: \"kubernetes.io/projected/53996a35-f507-4f0a-ac6a-b7e2ba7545a8-kube-api-access-g746l\") pod \"service-ca-9c57cc56f-mgx97\" (UID: \"53996a35-f507-4f0a-ac6a-b7e2ba7545a8\") " pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.729120 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.737555 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.743655 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.750476 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh5mc\" (UniqueName: \"kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.750832 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.753523 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.753635 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbchh\" (UniqueName: \"kubernetes.io/projected/a7b47ae1-3d79-42e6-b55a-4021723e74d5-kube-api-access-bbchh\") pod \"marketplace-operator-79b997595-565wv\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.757538 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.766330 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.778472 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqx5r\" (UniqueName: \"kubernetes.io/projected/65e96d17-3f27-42cb-a6cc-b911057378ab-kube-api-access-lqx5r\") pod \"router-default-5444994796-hw8r7\" (UID: \"65e96d17-3f27-42cb-a6cc-b911057378ab\") " pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.792347 4723 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.793082 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn9p2\" (UniqueName: \"kubernetes.io/projected/a89db749-53e1-4e74-b58b-2f8f4e990d68-kube-api-access-rn9p2\") pod \"cluster-image-registry-operator-dc59b4c8b-f4qpp\" (UID: \"a89db749-53e1-4e74-b58b-2f8f4e990d68\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.812413 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.838721 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.840826 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh"] Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.891787 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fhg8f"] Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.894076 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4bd0ccab-0a40-492f-8f39-b451b8c36c1c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkmq6\" (UID: \"4bd0ccab-0a40-492f-8f39-b451b8c36c1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.921223 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7jjr\" (UniqueName: \"kubernetes.io/projected/de1f58e2-b8f5-468e-80f7-6476eefc67f0-kube-api-access-x7jjr\") pod \"service-ca-operator-777779d784-q4z7j\" (UID: \"de1f58e2-b8f5-468e-80f7-6476eefc67f0\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.932302 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcsr8\" (UniqueName: \"kubernetes.io/projected/816fbc57-2230-443a-979a-42cecc91e498-kube-api-access-fcsr8\") pod \"olm-operator-6b444d44fb-vc9bf\" (UID: \"816fbc57-2230-443a-979a-42cecc91e498\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.947798 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.961792 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7kxk\" (UniqueName: \"kubernetes.io/projected/0008032b-faa4-4c7a-87ea-5ede94bc0229-kube-api-access-h7kxk\") pod \"oauth-openshift-558db77b4-fr92q\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.974548 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.980470 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l5nx\" (UniqueName: \"kubernetes.io/projected/166ce132-af47-4b71-930e-bd80549c3d3f-kube-api-access-6l5nx\") pod \"machine-config-controller-84d6567774-d78kx\" (UID: \"166ce132-af47-4b71-930e-bd80549c3d3f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.987194 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" Dec 11 15:23:49 crc kubenswrapper[4723]: I1211 15:23:49.992228 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfk5w\" (UniqueName: \"kubernetes.io/projected/cba98b6e-0115-4c3e-801e-23f24e4720d9-kube-api-access-rfk5w\") pod \"etcd-operator-b45778765-q5pwb\" (UID: \"cba98b6e-0115-4c3e-801e-23f24e4720d9\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.014547 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-567jr\" (UniqueName: \"kubernetes.io/projected/4ab833cc-e28e-4c58-8a18-b1891eb69b7f-kube-api-access-567jr\") pod \"kube-storage-version-migrator-operator-b67b599dd-w87r2\" (UID: \"4ab833cc-e28e-4c58-8a18-b1891eb69b7f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.023107 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.031552 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grmrc\" (UniqueName: \"kubernetes.io/projected/7b651358-a4e6-40b5-a8db-f4108332e022-kube-api-access-grmrc\") pod \"control-plane-machine-set-operator-78cbb6b69f-kv42r\" (UID: \"7b651358-a4e6-40b5-a8db-f4108332e022\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.041543 4723 secret.go:188] Couldn't get secret openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.041662 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert podName:85180921-c4ba-4b06-9240-4d35a5c57248 nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.04163474 +0000 UTC m=+41.815868175 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert") pod "controller-manager-879f6c89f-7htrb" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248") : failed to sync secret cache: timed out waiting for the condition Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.046108 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gjzk\" (UniqueName: \"kubernetes.io/projected/dd17f73a-62e0-4e24-924c-a449a813f55b-kube-api-access-7gjzk\") pod \"package-server-manager-789f6589d5-kq5tw\" (UID: \"dd17f73a-62e0-4e24-924c-a449a813f55b\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.071276 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.072397 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.077012 4723 projected.go:194] Error preparing data for projected volume kube-api-access-5bh5p for pod openshift-console/downloads-7954f5f757-sjsqg: failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.078384 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3cabbadb-fc14-4253-a767-d153aa9604bc-kube-api-access-5bh5p podName:3cabbadb-fc14-4253-a767-d153aa9604bc nodeName:}" failed. No retries permitted until 2025-12-11 15:23:50.578353912 +0000 UTC m=+41.352587337 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-5bh5p" (UniqueName: "kubernetes.io/projected/3cabbadb-fc14-4253-a767-d153aa9604bc-kube-api-access-5bh5p") pod "downloads-7954f5f757-sjsqg" (UID: "3cabbadb-fc14-4253-a767-d153aa9604bc") : failed to sync configmap cache: timed out waiting for the condition Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.078478 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kxfd\" (UniqueName: \"kubernetes.io/projected/4aef2280-346e-448c-90a2-bdb01a0e9b2d-kube-api-access-9kxfd\") pod \"console-operator-58897d9998-nxcpx\" (UID: \"4aef2280-346e-448c-90a2-bdb01a0e9b2d\") " pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.077090 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.083235 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.095300 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" event={"ID":"939fb1b2-4cc1-4c31-a3d1-eb748154b875","Type":"ContainerStarted","Data":"13f2a8cd04a78fc91a0989f7909c6cbec0c2ce904d07ff42d2f8bb5b4be38194"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.095361 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" event={"ID":"939fb1b2-4cc1-4c31-a3d1-eb748154b875","Type":"ContainerStarted","Data":"a3183b875307a11386f452c4ee8053114a3581d6bbc86aff0e680ffa540697dd"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.095648 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.101403 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.101850 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.104529 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.115112 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.120983 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.123261 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv"] Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.123551 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l"] Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.123574 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.131619 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.132016 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" event={"ID":"ca6421f1-1c3d-4fbb-bf31-10f7f45f127b","Type":"ContainerStarted","Data":"39f1ea58d68aab86671caee12d2782399ef4fd31f00aae400b720715c323d52b"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.147317 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.152351 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mwn6z" event={"ID":"542875f8-72b1-4325-bc1d-95f3b2f53efc","Type":"ContainerStarted","Data":"019e54de4a1a37f419c83a75e44aa032243527adc1bebb0becf4d6d5661638a3"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.152424 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mwn6z" event={"ID":"542875f8-72b1-4325-bc1d-95f3b2f53efc","Type":"ContainerStarted","Data":"150ed9042682c80c8b62c4aad01bb7ad96c89ca34bc3c67b68ae41d1c07a5509"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.153022 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.167487 4723 generic.go:334] "Generic (PLEG): container finished" podID="ec840a62-9898-474e-bdbf-f92b2f01174b" containerID="ca5541818e412fcd93a5a176ed586999ed45370b48363331142bcab3ca8bd930" exitCode=0 Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.167571 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" event={"ID":"ec840a62-9898-474e-bdbf-f92b2f01174b","Type":"ContainerDied","Data":"ca5541818e412fcd93a5a176ed586999ed45370b48363331142bcab3ca8bd930"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.167605 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" event={"ID":"ec840a62-9898-474e-bdbf-f92b2f01174b","Type":"ContainerStarted","Data":"7d5a63822a47e348df5bad25c8eeaffe7a9fbf9bad0b77f104fe4c962faab52e"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.173319 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.182795 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" event={"ID":"810dc990-b95f-403e-ab68-5c65f34396bf","Type":"ContainerStarted","Data":"061c0fbed63d1da2c14d92c5876a56e706994143f637abcdb35832518f2c6c34"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.182848 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" event={"ID":"810dc990-b95f-403e-ab68-5c65f34396bf","Type":"ContainerStarted","Data":"3a7960e9729c9d46f33a65d67e8f5909141954982d706170c8279d3a7b971f9f"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.182860 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" event={"ID":"810dc990-b95f-403e-ab68-5c65f34396bf","Type":"ContainerStarted","Data":"be6b76f371d29e06102b2dd41dd996a683ab1c182af4f2f83cef88d35549726f"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.184672 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" event={"ID":"7593c9bb-a459-41a7-87ed-9715551fb659","Type":"ContainerStarted","Data":"506a4ff6e08b5c4fe9bdf550fa618bd84c0f07243f9e439e2ad6bf5589a65775"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.185154 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh5mc\" 
(UniqueName: \"kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.191992 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.194749 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" event={"ID":"154afe62-f77d-4434-9250-6dc1a2a8b252","Type":"ContainerStarted","Data":"40f8882fefcff37770ff8aff9c32c3814cad47e39d0714a2c4b60d6c87a278d9"} Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.199624 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg"] Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.210556 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.230907 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.237632 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp"] Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.249695 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.261848 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h"] Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.276524 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9cw8\" (UniqueName: \"kubernetes.io/projected/1dae0577-a799-4ba7-9cc2-f6c38436bae4-kube-api-access-n9cw8\") pod \"console-f9d7485db-dw6bx\" (UID: \"1dae0577-a799-4ba7-9cc2-f6c38436bae4\") " pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375004 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/878f7948-c80f-4d35-82f0-0eb16b515ac8-secret-volume\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375468 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t75d\" (UniqueName: \"kubernetes.io/projected/e014794a-d5a4-4ced-b54e-55130268ffd2-kube-api-access-4t75d\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375491 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prq97\" (UniqueName: \"kubernetes.io/projected/878f7948-c80f-4d35-82f0-0eb16b515ac8-kube-api-access-prq97\") pod \"collect-profiles-29424435-z2fjq\" (UID: 
\"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375549 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375569 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7mt5\" (UniqueName: \"kubernetes.io/projected/59a291e0-48e4-4273-bdd1-1e21c733639e-kube-api-access-c7mt5\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375597 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9cb7bb8-2613-4876-aa27-39d58db1aae1-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375666 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-certificates\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375701 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2d6cbe0b-45fd-4b5d-9e42-c364e8893035-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lsx7q\" (UID: \"2d6cbe0b-45fd-4b5d-9e42-c364e8893035\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375722 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-socket-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375753 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cecdb332-0935-414b-a3f3-c5ee04a211c9-webhook-cert\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375775 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-plugins-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " 
pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375824 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-trusted-ca\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375845 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9cb7bb8-2613-4876-aa27-39d58db1aae1-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375873 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smwcw\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-kube-api-access-smwcw\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375904 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x97h\" (UniqueName: \"kubernetes.io/projected/cecdb332-0935-414b-a3f3-c5ee04a211c9-kube-api-access-4x97h\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.375991 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-bound-sa-token\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.376015 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cecdb332-0935-414b-a3f3-c5ee04a211c9-apiservice-cert\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.376051 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/59a291e0-48e4-4273-bdd1-1e21c733639e-certs\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.376090 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 
15:23:50.376159 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:50.876136578 +0000 UTC m=+41.650370013 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.376258 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e014794a-d5a4-4ced-b54e-55130268ffd2-metrics-tls\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.376386 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvckx\" (UniqueName: \"kubernetes.io/projected/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-kube-api-access-nvckx\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.376498 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/b9cb7bb8-2613-4876-aa27-39d58db1aae1-ready\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.376553 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b52hx\" (UniqueName: \"kubernetes.io/projected/2d6cbe0b-45fd-4b5d-9e42-c364e8893035-kube-api-access-b52hx\") pod \"cluster-samples-operator-665b6dd947-lsx7q\" (UID: \"2d6cbe0b-45fd-4b5d-9e42-c364e8893035\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.378954 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-mountpoint-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.379017 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e014794a-d5a4-4ced-b54e-55130268ffd2-config-volume\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.379050 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj7sx\" (UniqueName: \"kubernetes.io/projected/b6cbbbf3-7b03-4a74-82c6-147c26fca1d4-kube-api-access-nj7sx\") pod 
\"dns-operator-744455d44c-tdqkq\" (UID: \"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4\") " pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.384414 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-registration-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.384657 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f-cert\") pod \"ingress-canary-x4pzx\" (UID: \"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f\") " pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.384799 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/878f7948-c80f-4d35-82f0-0eb16b515ac8-config-volume\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.385397 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b6cbbbf3-7b03-4a74-82c6-147c26fca1d4-metrics-tls\") pod \"dns-operator-744455d44c-tdqkq\" (UID: \"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4\") " pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.385505 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.385827 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-csi-data-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.386487 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/59a291e0-48e4-4273-bdd1-1e21c733639e-node-bootstrap-token\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.386993 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szwrl\" (UniqueName: \"kubernetes.io/projected/9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f-kube-api-access-szwrl\") pod \"ingress-canary-x4pzx\" (UID: \"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f\") " pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:50 crc kubenswrapper[4723]: 
I1211 15:23:50.387087 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frp2j\" (UniqueName: \"kubernetes.io/projected/b9cb7bb8-2613-4876-aa27-39d58db1aae1-kube-api-access-frp2j\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.387232 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-tls\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: W1211 15:23:50.389147 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b1cbd48_3b8f_4da4_ace3_af94ff1cd03c.slice/crio-27777eee80fb09901002a969b7597c369f88bbf7c5267e04950155ee99cec5d2 WatchSource:0}: Error finding container 27777eee80fb09901002a969b7597c369f88bbf7c5267e04950155ee99cec5d2: Status 404 returned error can't find the container with id 27777eee80fb09901002a969b7597c369f88bbf7c5267e04950155ee99cec5d2 Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.394803 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/cecdb332-0935-414b-a3f3-c5ee04a211c9-tmpfs\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: W1211 15:23:50.422097 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fb65e41_05fd_483d_a2ab_dfa663b0660c.slice/crio-948c4d7f28283d858e491742d1159858104233c461ec3ead730eb609856ff8b3 WatchSource:0}: Error finding container 948c4d7f28283d858e491742d1159858104233c461ec3ead730eb609856ff8b3: Status 404 returned error can't find the container with id 948c4d7f28283d858e491742d1159858104233c461ec3ead730eb609856ff8b3 Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.497532 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.497903 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:50.997870475 +0000 UTC m=+41.772103910 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499607 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/cecdb332-0935-414b-a3f3-c5ee04a211c9-tmpfs\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499692 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/878f7948-c80f-4d35-82f0-0eb16b515ac8-secret-volume\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499750 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t75d\" (UniqueName: \"kubernetes.io/projected/e014794a-d5a4-4ced-b54e-55130268ffd2-kube-api-access-4t75d\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499777 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prq97\" (UniqueName: \"kubernetes.io/projected/878f7948-c80f-4d35-82f0-0eb16b515ac8-kube-api-access-prq97\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499853 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499884 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7mt5\" (UniqueName: \"kubernetes.io/projected/59a291e0-48e4-4273-bdd1-1e21c733639e-kube-api-access-c7mt5\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499912 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9cb7bb8-2613-4876-aa27-39d58db1aae1-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.499946 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-certificates\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500037 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cecdb332-0935-414b-a3f3-c5ee04a211c9-webhook-cert\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500071 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2d6cbe0b-45fd-4b5d-9e42-c364e8893035-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lsx7q\" (UID: \"2d6cbe0b-45fd-4b5d-9e42-c364e8893035\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500096 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-socket-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500160 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-plugins-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500187 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-trusted-ca\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500184 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/cecdb332-0935-414b-a3f3-c5ee04a211c9-tmpfs\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500209 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9cb7bb8-2613-4876-aa27-39d58db1aae1-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500240 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smwcw\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-kube-api-access-smwcw\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 
15:23:50.500300 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x97h\" (UniqueName: \"kubernetes.io/projected/cecdb332-0935-414b-a3f3-c5ee04a211c9-kube-api-access-4x97h\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500340 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-bound-sa-token\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500385 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cecdb332-0935-414b-a3f3-c5ee04a211c9-apiservice-cert\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500408 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/59a291e0-48e4-4273-bdd1-1e21c733639e-certs\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500449 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500513 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvckx\" (UniqueName: \"kubernetes.io/projected/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-kube-api-access-nvckx\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500541 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e014794a-d5a4-4ced-b54e-55130268ffd2-metrics-tls\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500573 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/b9cb7bb8-2613-4876-aa27-39d58db1aae1-ready\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500624 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b52hx\" (UniqueName: \"kubernetes.io/projected/2d6cbe0b-45fd-4b5d-9e42-c364e8893035-kube-api-access-b52hx\") pod \"cluster-samples-operator-665b6dd947-lsx7q\" (UID: \"2d6cbe0b-45fd-4b5d-9e42-c364e8893035\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500716 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-mountpoint-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500740 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e014794a-d5a4-4ced-b54e-55130268ffd2-config-volume\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500766 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj7sx\" (UniqueName: \"kubernetes.io/projected/b6cbbbf3-7b03-4a74-82c6-147c26fca1d4-kube-api-access-nj7sx\") pod \"dns-operator-744455d44c-tdqkq\" (UID: \"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4\") " pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500818 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-registration-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500872 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f-cert\") pod \"ingress-canary-x4pzx\" (UID: \"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f\") " pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500906 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/878f7948-c80f-4d35-82f0-0eb16b515ac8-config-volume\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.500996 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b6cbbbf3-7b03-4a74-82c6-147c26fca1d4-metrics-tls\") pod \"dns-operator-744455d44c-tdqkq\" (UID: \"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4\") " pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.501053 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.501152 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-csi-data-dir\") pod \"csi-hostpathplugin-qm4lg\" 
(UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.501552 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/59a291e0-48e4-4273-bdd1-1e21c733639e-node-bootstrap-token\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.501606 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szwrl\" (UniqueName: \"kubernetes.io/projected/9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f-kube-api-access-szwrl\") pod \"ingress-canary-x4pzx\" (UID: \"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f\") " pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.501666 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frp2j\" (UniqueName: \"kubernetes.io/projected/b9cb7bb8-2613-4876-aa27-39d58db1aae1-kube-api-access-frp2j\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.501691 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-certificates\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.501704 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-tls\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.502275 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.002266113 +0000 UTC m=+41.776499548 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.502576 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-plugins-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.502852 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9cb7bb8-2613-4876-aa27-39d58db1aae1-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.503522 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-socket-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.504191 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e014794a-d5a4-4ced-b54e-55130268ffd2-config-volume\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.504661 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9cb7bb8-2613-4876-aa27-39d58db1aae1-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.506794 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-trusted-ca\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.507933 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-registration-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.511886 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/b9cb7bb8-2613-4876-aa27-39d58db1aae1-ready\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc 
kubenswrapper[4723]: I1211 15:23:50.512783 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-csi-data-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.512824 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/878f7948-c80f-4d35-82f0-0eb16b515ac8-config-volume\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.513387 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.515719 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-mountpoint-dir\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.516354 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2d6cbe0b-45fd-4b5d-9e42-c364e8893035-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lsx7q\" (UID: \"2d6cbe0b-45fd-4b5d-9e42-c364e8893035\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.516876 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cecdb332-0935-414b-a3f3-c5ee04a211c9-webhook-cert\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.517709 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cecdb332-0935-414b-a3f3-c5ee04a211c9-apiservice-cert\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.530012 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-tls\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.530347 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e014794a-d5a4-4ced-b54e-55130268ffd2-metrics-tls\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " 
pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.530693 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/59a291e0-48e4-4273-bdd1-1e21c733639e-certs\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.530808 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f-cert\") pod \"ingress-canary-x4pzx\" (UID: \"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f\") " pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.531145 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b6cbbbf3-7b03-4a74-82c6-147c26fca1d4-metrics-tls\") pod \"dns-operator-744455d44c-tdqkq\" (UID: \"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4\") " pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.531240 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.531539 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/59a291e0-48e4-4273-bdd1-1e21c733639e-node-bootstrap-token\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.534291 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.587131 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/878f7948-c80f-4d35-82f0-0eb16b515ac8-secret-volume\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.587777 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prq97\" (UniqueName: \"kubernetes.io/projected/878f7948-c80f-4d35-82f0-0eb16b515ac8-kube-api-access-prq97\") pod \"collect-profiles-29424435-z2fjq\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.605446 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.606058 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bh5p\" (UniqueName: \"kubernetes.io/projected/3cabbadb-fc14-4253-a767-d153aa9604bc-kube-api-access-5bh5p\") pod \"downloads-7954f5f757-sjsqg\" (UID: \"3cabbadb-fc14-4253-a767-d153aa9604bc\") " pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.607100 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.107058496 +0000 UTC m=+41.881291931 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.624709 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7mt5\" (UniqueName: \"kubernetes.io/projected/59a291e0-48e4-4273-bdd1-1e21c733639e-kube-api-access-c7mt5\") pod \"machine-config-server-jqpj6\" (UID: \"59a291e0-48e4-4273-bdd1-1e21c733639e\") " pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.625862 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smwcw\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-kube-api-access-smwcw\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.632230 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t75d\" (UniqueName: \"kubernetes.io/projected/e014794a-d5a4-4ced-b54e-55130268ffd2-kube-api-access-4t75d\") pod \"dns-default-s276d\" (UID: \"e014794a-d5a4-4ced-b54e-55130268ffd2\") " pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.642578 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bh5p\" (UniqueName: \"kubernetes.io/projected/3cabbadb-fc14-4253-a767-d153aa9604bc-kube-api-access-5bh5p\") pod \"downloads-7954f5f757-sjsqg\" (UID: \"3cabbadb-fc14-4253-a767-d153aa9604bc\") " pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.642847 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x97h\" (UniqueName: \"kubernetes.io/projected/cecdb332-0935-414b-a3f3-c5ee04a211c9-kube-api-access-4x97h\") pod \"packageserver-d55dfcdfc-hvds8\" (UID: \"cecdb332-0935-414b-a3f3-c5ee04a211c9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.646284 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-bound-sa-token\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.670911 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nj7sx\" (UniqueName: \"kubernetes.io/projected/b6cbbbf3-7b03-4a74-82c6-147c26fca1d4-kube-api-access-nj7sx\") pod \"dns-operator-744455d44c-tdqkq\" (UID: \"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4\") " pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.674656 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.695872 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frp2j\" (UniqueName: \"kubernetes.io/projected/b9cb7bb8-2613-4876-aa27-39d58db1aae1-kube-api-access-frp2j\") pod \"cni-sysctl-allowlist-ds-tbf9x\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.709924 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.710475 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.210453062 +0000 UTC m=+41.984686497 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.719893 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvckx\" (UniqueName: \"kubernetes.io/projected/a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3-kube-api-access-nvckx\") pod \"csi-hostpathplugin-qm4lg\" (UID: \"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3\") " pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.728035 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b52hx\" (UniqueName: \"kubernetes.io/projected/2d6cbe0b-45fd-4b5d-9e42-c364e8893035-kube-api-access-b52hx\") pod \"cluster-samples-operator-665b6dd947-lsx7q\" (UID: \"2d6cbe0b-45fd-4b5d-9e42-c364e8893035\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.741646 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.750760 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szwrl\" (UniqueName: \"kubernetes.io/projected/9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f-kube-api-access-szwrl\") pod \"ingress-canary-x4pzx\" (UID: \"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f\") " pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.754552 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.770050 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.779143 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.792479 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-s276d" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.804010 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jqpj6" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.812502 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.812769 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.312755109 +0000 UTC m=+42.086988544 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.823809 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.836957 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-x4pzx" Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.863301 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" Dec 11 15:23:50 crc kubenswrapper[4723]: W1211 15:23:50.871758 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9cb7bb8_2613_4876_aa27_39d58db1aae1.slice/crio-0d92f62263746c0fde935b76e0b81f6a78267251be83ddd675f8389bcccda725 WatchSource:0}: Error finding container 0d92f62263746c0fde935b76e0b81f6a78267251be83ddd675f8389bcccda725: Status 404 returned error can't find the container with id 0d92f62263746c0fde935b76e0b81f6a78267251be83ddd675f8389bcccda725 Dec 11 15:23:50 crc kubenswrapper[4723]: I1211 15:23:50.917145 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:50 crc kubenswrapper[4723]: E1211 15:23:50.917584 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.417569293 +0000 UTC m=+42.191802718 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.018393 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.019324 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.519290904 +0000 UTC m=+42.293524339 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.085159 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jrjdt"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.092548 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.120219 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-5wvnh"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.120639 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.120696 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.121162 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.621145569 +0000 UTC m=+42.395379004 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.137224 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert\") pod \"controller-manager-879f6c89f-7htrb\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.153487 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-mgx97"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.223132 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" event={"ID":"7734e148-f74c-4b24-ac4e-02d85b478850","Type":"ContainerStarted","Data":"551f2828078f2471f16efeca78092b1afeea4efea3613c69de07ce54cf081134"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.226146 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.226225 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.726206049 +0000 UTC m=+42.500439484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.227327 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-565wv"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.227569 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.229273 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:51.729253701 +0000 UTC m=+42.503487126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.230419 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.232988 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" event={"ID":"7593c9bb-a459-41a7-87ed-9715551fb659","Type":"ContainerStarted","Data":"6ab76f1ecebc5e4b9bbe4ac3ecc29e90d42ce288246ab24fb92b9c2826ad0e7b"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.237400 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.237904 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.248003 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" event={"ID":"154afe62-f77d-4434-9250-6dc1a2a8b252","Type":"ContainerDied","Data":"9ff76b593bead56d704ec306c789f03ba30f52d119032fc81b123dd818c61c9d"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.288315 4723 generic.go:334] "Generic (PLEG): container finished" podID="154afe62-f77d-4434-9250-6dc1a2a8b252" containerID="9ff76b593bead56d704ec306c789f03ba30f52d119032fc81b123dd818c61c9d" exitCode=0 Dec 11 15:23:51 crc kubenswrapper[4723]: W1211 15:23:51.290606 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd2626c4_8f7b_43ab_93c2_f7f535a400b8.slice/crio-57ae1fa257d23afc09c275bad8248743ef677fb500fe76a61e204ae1915c5d50 WatchSource:0}: Error finding container 57ae1fa257d23afc09c275bad8248743ef677fb500fe76a61e204ae1915c5d50: Status 404 returned error can't find the container with id 57ae1fa257d23afc09c275bad8248743ef677fb500fe76a61e204ae1915c5d50 Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.296079 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" event={"ID":"9953bf77-d4f2-4168-81b2-fdb772f44212","Type":"ContainerStarted","Data":"13cf0951190905dd0640eb134d74b59042c260fc50a7655224df403ca74f5562"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.303616 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jqpj6" event={"ID":"59a291e0-48e4-4273-bdd1-1e21c733639e","Type":"ContainerStarted","Data":"2b6f36c01a18442b3479f0a479e60812858dbbd3956a19b6324538be45ebcc0c"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.316155 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hw8r7" 
event={"ID":"65e96d17-3f27-42cb-a6cc-b911057378ab","Type":"ContainerStarted","Data":"085c5cbf00e562a07586bc45457882d6f4fe0fcfd5e9f7e2a6071c78d242b0ed"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.316235 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hw8r7" event={"ID":"65e96d17-3f27-42cb-a6cc-b911057378ab","Type":"ContainerStarted","Data":"cfc9bfc452d4afe6a8d8557f89cba40c53cb1562d762a8640d46fd674e622f7f"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.323006 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" event={"ID":"2fb65e41-05fd-483d-a2ab-dfa663b0660c","Type":"ContainerStarted","Data":"948c4d7f28283d858e491742d1159858104233c461ec3ead730eb609856ff8b3"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.331348 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.335159 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.835123123 +0000 UTC m=+42.609356558 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.377583 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" event={"ID":"97027ce1-2e5b-4186-90fc-089d66251247","Type":"ContainerStarted","Data":"d9fea43bf57e5e1c10635fd813cedfc14e8b72e3c248ddd627ed070a060a1753"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.377624 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" event={"ID":"97027ce1-2e5b-4186-90fc-089d66251247","Type":"ContainerStarted","Data":"6091de5fad81d8234477c5dfa609f17f0056586300e0be02847288126e8280a9"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.389600 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" event={"ID":"b9cb7bb8-2613-4876-aa27-39d58db1aae1","Type":"ContainerStarted","Data":"0d92f62263746c0fde935b76e0b81f6a78267251be83ddd675f8389bcccda725"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.419048 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" event={"ID":"ec840a62-9898-474e-bdbf-f92b2f01174b","Type":"ContainerStarted","Data":"6708057189d3ae1b52a9d6e394de92e2cf9a2ea5dc9055c78806d4006de59a19"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.441066 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.442763 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:51.942751012 +0000 UTC m=+42.716984447 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: W1211 15:23:51.463995 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f4f87bd_6c70_4163_a655_957bcc992271.slice/crio-453aee18cc91e55c8ee378908248efd57e7ffb3c846004199f649a7d5f885e0e WatchSource:0}: Error finding container 453aee18cc91e55c8ee378908248efd57e7ffb3c846004199f649a7d5f885e0e: Status 404 returned error can't find the container with id 453aee18cc91e55c8ee378908248efd57e7ffb3c846004199f649a7d5f885e0e Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.496408 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" event={"ID":"a948432a-a38e-4465-9b5d-7b841b06d81f","Type":"ContainerStarted","Data":"1d1e14f5840a28146b8a01fc6e8e287bb92e6fcbde868f47dbfad4db7cef6fe7"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.496466 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" event={"ID":"a948432a-a38e-4465-9b5d-7b841b06d81f","Type":"ContainerStarted","Data":"20b19097342b078cdf91cd276fb3c43fd8f4518a3c41d83d2467aa973b85b022"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.497159 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.525206 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" event={"ID":"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c","Type":"ContainerStarted","Data":"ce5d5ea832f9f9f280551085324bd397b22c8c3842523318fd55eb965fcb2e09"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.525267 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" event={"ID":"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c","Type":"ContainerStarted","Data":"27777eee80fb09901002a969b7597c369f88bbf7c5267e04950155ee99cec5d2"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.525926 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.530089 4723 
patch_prober.go:28] interesting pod/catalog-operator-68c6474976-5gjkv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.530557 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" podUID="a948432a-a38e-4465-9b5d-7b841b06d81f" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.541726 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-mwn6z" event={"ID":"542875f8-72b1-4325-bc1d-95f3b2f53efc","Type":"ContainerStarted","Data":"524e5ba834b469f601909f9a41c61666540402816beb728ab4b0935f9dce52af"} Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.553485 4723 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-p7nsg container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.553573 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" podUID="3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.555310 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.555787 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.055757855 +0000 UTC m=+42.829991300 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.556011 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.557321 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.057310777 +0000 UTC m=+42.831544282 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.654587 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-mwn6z" podStartSLOduration=20.654560388 podStartE2EDuration="20.654560388s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:51.631515892 +0000 UTC m=+42.405749327" watchObservedRunningTime="2025-12-11 15:23:51.654560388 +0000 UTC m=+42.428793823" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.657812 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.660421 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.160371914 +0000 UTC m=+42.934605349 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.660844 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.677670 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.685133 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.685350 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.689675 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.691850 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-nxcpx"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.708507 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" podStartSLOduration=19.708480611 podStartE2EDuration="19.708480611s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:51.700929939 +0000 UTC m=+42.475163374" watchObservedRunningTime="2025-12-11 15:23:51.708480611 +0000 UTC m=+42.482714046" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.709344 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.737534 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.741297 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.749126 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fr92q"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.751406 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dw6bx"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.761890 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: 
\"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.763800 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.26378462 +0000 UTC m=+43.038018055 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: W1211 15:23:51.770035 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd17f73a_62e0_4e24_924c_a449a813f55b.slice/crio-87d23ea378589a754b7d0c9c593973a8dc76e39ee911ec867f9d51eb64fbbaa9 WatchSource:0}: Error finding container 87d23ea378589a754b7d0c9c593973a8dc76e39ee911ec867f9d51eb64fbbaa9: Status 404 returned error can't find the container with id 87d23ea378589a754b7d0c9c593973a8dc76e39ee911ec867f9d51eb64fbbaa9 Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.819566 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q5pwb"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.835521 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-tdqkq"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.837054 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-sjsqg"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.860116 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.860186 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-qm4lg"] Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.865039 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.865369 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.365353138 +0000 UTC m=+43.139586573 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.866457 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-tklxz" podStartSLOduration=21.866438587 podStartE2EDuration="21.866438587s" podCreationTimestamp="2025-12-11 15:23:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:51.85348849 +0000 UTC m=+42.627721925" watchObservedRunningTime="2025-12-11 15:23:51.866438587 +0000 UTC m=+42.640672022" Dec 11 15:23:51 crc kubenswrapper[4723]: I1211 15:23:51.978314 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:51 crc kubenswrapper[4723]: E1211 15:23:51.979232 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.479215484 +0000 UTC m=+43.253448919 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.025578 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.041077 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:52 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:52 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:52 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.041157 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.081912 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.082525 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.582500896 +0000 UTC m=+43.356734331 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.082869 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.083533 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.583524973 +0000 UTC m=+43.357758408 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.110909 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xtfgh" podStartSLOduration=21.110882775 podStartE2EDuration="21.110882775s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.108976994 +0000 UTC m=+42.883210429" watchObservedRunningTime="2025-12-11 15:23:52.110882775 +0000 UTC m=+42.885116210" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.149657 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-hw8r7" podStartSLOduration=21.149637972 podStartE2EDuration="21.149637972s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.147951367 +0000 UTC m=+42.922184802" watchObservedRunningTime="2025-12-11 15:23:52.149637972 +0000 UTC m=+42.923871407" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.184440 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.186869 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.686843067 +0000 UTC m=+43.461076502 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.193858 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-x4pzx"] Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.214322 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q"] Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.241426 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-s276d"] Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.258169 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7htrb"] Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.258230 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8"] Dec 11 15:23:52 crc kubenswrapper[4723]: W1211 15:23:52.260480 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a9fea4f_8828_4ea9_8b0c_a77ed4fbd07f.slice/crio-e52671e5d46570a0c4e5a5b4d7d37aeda39b0c4541aa862a4d5b9a7fb03d796c WatchSource:0}: Error finding container e52671e5d46570a0c4e5a5b4d7d37aeda39b0c4541aa862a4d5b9a7fb03d796c: Status 404 returned error can't find the container with id e52671e5d46570a0c4e5a5b4d7d37aeda39b0c4541aa862a4d5b9a7fb03d796c Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.286516 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" podStartSLOduration=21.286491613 podStartE2EDuration="21.286491613s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.285396193 +0000 UTC m=+43.059629628" watchObservedRunningTime="2025-12-11 15:23:52.286491613 +0000 UTC m=+43.060725058" Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.287211 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.787187981 +0000 UTC m=+43.561421416 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.286741 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.363113 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-fhg8f" podStartSLOduration=21.363095112 podStartE2EDuration="21.363095112s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.361579491 +0000 UTC m=+43.135812936" watchObservedRunningTime="2025-12-11 15:23:52.363095112 +0000 UTC m=+43.137328537" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.392684 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.393049 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:52.893033673 +0000 UTC m=+43.667267108 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.494576 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.495412 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:52.9953316 +0000 UTC m=+43.769565045 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.523625 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-ndhbx" podStartSLOduration=20.523606576 podStartE2EDuration="20.523606576s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.523078192 +0000 UTC m=+43.297311627" watchObservedRunningTime="2025-12-11 15:23:52.523606576 +0000 UTC m=+43.297840011" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.554729 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" event={"ID":"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65","Type":"ContainerStarted","Data":"46c8fa3e2eeea676ad9b674a9b3b26cb9dfe5caf82526bd276617576aa006e2e"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.554789 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" event={"ID":"bcfce2ca-9f37-4a2b-ada6-a2cf61d65f65","Type":"ContainerStarted","Data":"c9ef0c4b8c34d9eef09a6b252971e687e631352f688c57fd188dc0932cd08995"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.568430 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" event={"ID":"816fbc57-2230-443a-979a-42cecc91e498","Type":"ContainerStarted","Data":"232cc122b7075b50f0aae51ea42fff818480cb1285e42db8d78f1547f563c226"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.598837 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.599306 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.099285791 +0000 UTC m=+43.873519226 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.600322 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" event={"ID":"b9cb7bb8-2613-4876-aa27-39d58db1aae1","Type":"ContainerStarted","Data":"e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.601020 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.605109 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" podStartSLOduration=20.605082886 podStartE2EDuration="20.605082886s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.59815266 +0000 UTC m=+43.372386095" watchObservedRunningTime="2025-12-11 15:23:52.605082886 +0000 UTC m=+43.379316321" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.614502 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" event={"ID":"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4","Type":"ContainerStarted","Data":"3da7daa6877803b734a0fad704cebf541beffa6267a742a0c40993fe06712e9a"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.633730 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" podStartSLOduration=20.633710842 podStartE2EDuration="20.633710842s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.631524843 +0000 UTC m=+43.405758278" watchObservedRunningTime="2025-12-11 15:23:52.633710842 +0000 UTC m=+43.407944277" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.670794 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" event={"ID":"166ce132-af47-4b71-930e-bd80549c3d3f","Type":"ContainerStarted","Data":"da5e0cbeda27dc52713178dd5d02939f637f80f224bb203df694b76db0173e8c"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.675533 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jqpj6" event={"ID":"59a291e0-48e4-4273-bdd1-1e21c733639e","Type":"ContainerStarted","Data":"df15f1bf87da0a8c58833703ee83a8159d786bdb8be87d6832b608b2ec0d2900"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.678452 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" event={"ID":"cba98b6e-0115-4c3e-801e-23f24e4720d9","Type":"ContainerStarted","Data":"f17c6bbbf92d80e2ac64fd5fd12e8d01ad58027e294ad846e1f754b48af7be68"} Dec 11 15:23:52 crc kubenswrapper[4723]: 
I1211 15:23:52.679386 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" event={"ID":"dd17f73a-62e0-4e24-924c-a449a813f55b","Type":"ContainerStarted","Data":"87d23ea378589a754b7d0c9c593973a8dc76e39ee911ec867f9d51eb64fbbaa9"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.680407 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" event={"ID":"2d6cbe0b-45fd-4b5d-9e42-c364e8893035","Type":"ContainerStarted","Data":"8dfa11ae88bd6c0f2f3d77f10c051d954b67ca0ff1416375d6e2d262134b6a1b"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.681306 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" event={"ID":"de1f58e2-b8f5-468e-80f7-6476eefc67f0","Type":"ContainerStarted","Data":"39a7d830748cd138a8e122f5fdf62261d0eae311f84dd975de66b95710fb5757"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.687261 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.690253 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" event={"ID":"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3","Type":"ContainerStarted","Data":"274b8fecd1c15647dc2de14e27a50f8fe47a83e096e789674e2935c11b9180ce"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.695609 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-sjsqg" event={"ID":"3cabbadb-fc14-4253-a767-d153aa9604bc","Type":"ContainerStarted","Data":"af1fb4b8465fa4a7b459b52424c0bebf556a1d14bf49e6a0560aa5706a004f78"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.697594 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" event={"ID":"4bd0ccab-0a40-492f-8f39-b451b8c36c1c","Type":"ContainerStarted","Data":"8d8d1b5b9adeb85d573d5b9842c9af1826972b9ed4f48cb784642682bf407f44"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.698837 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-s276d" event={"ID":"e014794a-d5a4-4ced-b54e-55130268ffd2","Type":"ContainerStarted","Data":"591747fc28dd88dd06bd66ba458fd707bf72232ba5f9a91571a84c2a3e4e28ed"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.700548 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.703370 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.203354035 +0000 UTC m=+43.977587470 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.718596 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" event={"ID":"9f4f87bd-6c70-4163-a655-957bcc992271","Type":"ContainerStarted","Data":"453aee18cc91e55c8ee378908248efd57e7ffb3c846004199f649a7d5f885e0e"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.735582 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" event={"ID":"9953bf77-d4f2-4168-81b2-fdb772f44212","Type":"ContainerStarted","Data":"a820002d63fb006a1c19e3402098879d3bc67cd76994594bed5c1a79ee60212c"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.735659 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" event={"ID":"9953bf77-d4f2-4168-81b2-fdb772f44212","Type":"ContainerStarted","Data":"cdddb69d80eb68b5160071287537d5437784e769fc8c1b5e0a5c91c42e0dbea4"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.753581 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" event={"ID":"7b651358-a4e6-40b5-a8db-f4108332e022","Type":"ContainerStarted","Data":"96b24e91073d17e744eebdca0bd3d384d15b43b3efb038b84fe528d1ef865a3a"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.766716 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" event={"ID":"4aef2280-346e-448c-90a2-bdb01a0e9b2d","Type":"ContainerStarted","Data":"5df9eca96f75b882d395350e96ed28e18b3c5c92d181272ae7e8c413a8c8215d"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.774087 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jts4h" event={"ID":"2fb65e41-05fd-483d-a2ab-dfa663b0660c","Type":"ContainerStarted","Data":"795f027ececbbb14929827c444a04c1990ad077472254c215b2c37300c90bd11"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.799985 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" event={"ID":"0008032b-faa4-4c7a-87ea-5ede94bc0229","Type":"ContainerStarted","Data":"d80b35ae5c2252f9a7a855dce2875aa5c4e0d9307ec1355edbab42fb2ea6ede6"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.802223 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.802433 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:53.302404664 +0000 UTC m=+44.076638109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.802715 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.805253 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.30524293 +0000 UTC m=+44.079476365 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.824653 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" event={"ID":"a89db749-53e1-4e74-b58b-2f8f4e990d68","Type":"ContainerStarted","Data":"13995daf69b31ae58831c0b89cd14ef845451c6004857155956cdfc45101e0cf"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.836601 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" event={"ID":"878f7948-c80f-4d35-82f0-0eb16b515ac8","Type":"ContainerStarted","Data":"842140101cd610efd1790b0667a747dd249ddf3802da7010eae4d9ec8c57404c"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.844539 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" event={"ID":"97027ce1-2e5b-4186-90fc-089d66251247","Type":"ContainerStarted","Data":"6831da0e681c9ff3b960eb771a696894614656c7c5389d84597f50fa74d9f37a"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.858833 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" event={"ID":"85180921-c4ba-4b06-9240-4d35a5c57248","Type":"ContainerStarted","Data":"b4f25a96fc42042eb81217e4e4ce3f7ec97d5e93102b4d2b22be94e890f97f39"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.875041 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9jlk2" podStartSLOduration=21.875023407 podStartE2EDuration="21.875023407s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.870385943 +0000 UTC m=+43.644619388" watchObservedRunningTime="2025-12-11 15:23:52.875023407 +0000 UTC m=+43.649256842" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.887108 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" event={"ID":"154afe62-f77d-4434-9250-6dc1a2a8b252","Type":"ContainerStarted","Data":"503af70d165273d3dd6956dd17902b2cc5a633b1467772754052d67e2899772e"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.887958 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.904567 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:52 crc kubenswrapper[4723]: E1211 15:23:52.906134 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.406114199 +0000 UTC m=+44.180347644 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.907394 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" event={"ID":"a7b47ae1-3d79-42e6-b55a-4021723e74d5","Type":"ContainerStarted","Data":"9502810f0261b67508e3de4d728a242f3f82516c679a6d4bde46cb56f2558183"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.907441 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" event={"ID":"a7b47ae1-3d79-42e6-b55a-4021723e74d5","Type":"ContainerStarted","Data":"babeca3dfc87e06f80dfff2044a00af8910f47a5d8eb554a50d75803c0593bac"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.909631 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.913518 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podStartSLOduration=5.9134979659999996 podStartE2EDuration="5.913497966s" podCreationTimestamp="2025-12-11 15:23:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.909786257 +0000 UTC m=+43.684019692" watchObservedRunningTime="2025-12-11 15:23:52.913497966 +0000 UTC m=+43.687731391" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 
15:23:52.936229 4723 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-565wv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.936275 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.936277 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dw6bx" event={"ID":"1dae0577-a799-4ba7-9cc2-f6c38436bae4","Type":"ContainerStarted","Data":"b9d0c73ad69d22be8841467efa7b08ea4262aa52feb12b3e6fc05e089c7e9f0e"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.939557 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-x4pzx" event={"ID":"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f","Type":"ContainerStarted","Data":"e52671e5d46570a0c4e5a5b4d7d37aeda39b0c4541aa862a4d5b9a7fb03d796c"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.941309 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" event={"ID":"cecdb332-0935-414b-a3f3-c5ee04a211c9","Type":"ContainerStarted","Data":"db36a8707892075e90a999133618cabd6c6b51129d3b69977d023167a39eeada"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.958765 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-jqpj6" podStartSLOduration=5.958746637 podStartE2EDuration="5.958746637s" podCreationTimestamp="2025-12-11 15:23:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.958648124 +0000 UTC m=+43.732881559" watchObservedRunningTime="2025-12-11 15:23:52.958746637 +0000 UTC m=+43.732980072" Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.960563 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" event={"ID":"53996a35-f507-4f0a-ac6a-b7e2ba7545a8","Type":"ContainerStarted","Data":"9050d8ed8dcfabb5b0c5d6360adf717a9a57db57cbabe0ff4cb40570149f926b"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.960601 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" event={"ID":"53996a35-f507-4f0a-ac6a-b7e2ba7545a8","Type":"ContainerStarted","Data":"92beed10a6217b35c8540ad82daba3bedf248c265df0615fdb2927b85c087406"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.970412 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" event={"ID":"4ab833cc-e28e-4c58-8a18-b1891eb69b7f","Type":"ContainerStarted","Data":"157335ccfc62cbe9518f73497bee4ca88827882935420d6dcfdeb6073723cff1"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.993525 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" 
event={"ID":"7734e148-f74c-4b24-ac4e-02d85b478850","Type":"ContainerStarted","Data":"d4933a5c8c64533aa6d934defe9c896f06bce1acc043cb433d30252d4874dedb"} Dec 11 15:23:52 crc kubenswrapper[4723]: I1211 15:23:52.993587 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" event={"ID":"7734e148-f74c-4b24-ac4e-02d85b478850","Type":"ContainerStarted","Data":"ffbf63f983fe31668a31464b55f18a8017dc8de0bfbdffd09d95bcde2aaafd19"} Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.007063 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.009420 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.509408762 +0000 UTC m=+44.283642197 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.014000 4723 generic.go:334] "Generic (PLEG): container finished" podID="b789a0fc-d92b-43f2-bb28-0e522ae80af8" containerID="e8347652ab0a5debd8cdbbd1acaa38807ed7845fae61499e71fa1d8ed8917167" exitCode=0 Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.014084 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" event={"ID":"b789a0fc-d92b-43f2-bb28-0e522ae80af8","Type":"ContainerDied","Data":"e8347652ab0a5debd8cdbbd1acaa38807ed7845fae61499e71fa1d8ed8917167"} Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.014112 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" event={"ID":"b789a0fc-d92b-43f2-bb28-0e522ae80af8","Type":"ContainerStarted","Data":"de148bcefc73342f0b4cc79d17f4a340ca9ddf0e5cfbcabc659e1f244b050d1c"} Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.034869 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" podStartSLOduration=22.034850723 podStartE2EDuration="22.034850723s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:53.030408234 +0000 UTC m=+43.804641669" watchObservedRunningTime="2025-12-11 15:23:53.034850723 +0000 UTC m=+43.809084158" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.035710 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bnb9x" podStartSLOduration=22.035702695 
podStartE2EDuration="22.035702695s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:52.993081085 +0000 UTC m=+43.767314520" watchObservedRunningTime="2025-12-11 15:23:53.035702695 +0000 UTC m=+43.809936130" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.040307 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" event={"ID":"fd2626c4-8f7b-43ab-93c2-f7f535a400b8","Type":"ContainerStarted","Data":"c134a869c70b88e8950d9c10390b7e938140d1709abb4813d8620954abc6f8cd"} Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.040362 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" event={"ID":"fd2626c4-8f7b-43ab-93c2-f7f535a400b8","Type":"ContainerStarted","Data":"57ae1fa257d23afc09c275bad8248743ef677fb500fe76a61e204ae1915c5d50"} Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.047649 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:53 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:53 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:53 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.047704 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.048282 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5gjkv" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.057685 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.077380 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-mgx97" podStartSLOduration=21.07736169 podStartE2EDuration="21.07736169s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:53.075694885 +0000 UTC m=+43.849928320" watchObservedRunningTime="2025-12-11 15:23:53.07736169 +0000 UTC m=+43.851595115" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.109508 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.115565 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:53.615537611 +0000 UTC m=+44.389771106 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.200757 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" podStartSLOduration=22.20072402 podStartE2EDuration="22.20072402s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:53.145133633 +0000 UTC m=+43.919367088" watchObservedRunningTime="2025-12-11 15:23:53.20072402 +0000 UTC m=+43.974957455" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.211633 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.212197 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.712176846 +0000 UTC m=+44.486410281 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.238538 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qhkkp" podStartSLOduration=21.238502191 podStartE2EDuration="21.238502191s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:53.201317936 +0000 UTC m=+43.975551391" watchObservedRunningTime="2025-12-11 15:23:53.238502191 +0000 UTC m=+44.012735626" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.240887 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" podStartSLOduration=21.240876704 podStartE2EDuration="21.240876704s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:53.237287898 +0000 UTC m=+44.011521333" watchObservedRunningTime="2025-12-11 15:23:53.240876704 +0000 UTC m=+44.015110139" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.326704 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.327302 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.827269945 +0000 UTC m=+44.601503560 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.350903 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.351268 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.365068 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-h2b5l" podStartSLOduration=22.365034786 podStartE2EDuration="22.365034786s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:53.344665001 +0000 UTC m=+44.118898436" watchObservedRunningTime="2025-12-11 15:23:53.365034786 +0000 UTC m=+44.139268221" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.385848 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.437799 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.438291 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:53.938270865 +0000 UTC m=+44.712504300 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.539721 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.540357 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.040337035 +0000 UTC m=+44.814570480 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.615561 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wk4hd"] Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.619912 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wk4hd"] Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.620063 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.625783 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.642802 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.652957 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.152936368 +0000 UTC m=+44.927169803 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.745607 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.745994 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-utilities\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.746109 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hmzt\" (UniqueName: \"kubernetes.io/projected/935f64e7-557b-4244-b69e-b0943965db19-kube-api-access-6hmzt\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.746192 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-catalog-content\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.746320 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.246301905 +0000 UTC m=+45.020535340 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.769286 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hqpzz"] Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.770366 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.775450 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.788197 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqpzz"] Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.847533 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-utilities\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.847601 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq4hn\" (UniqueName: \"kubernetes.io/projected/164595ee-6652-4559-b8dd-7e040aa4602d-kube-api-access-zq4hn\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.847693 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-utilities\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.847714 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-catalog-content\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.847858 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.847896 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hmzt\" (UniqueName: \"kubernetes.io/projected/935f64e7-557b-4244-b69e-b0943965db19-kube-api-access-6hmzt\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.848025 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-catalog-content\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.850003 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.349988689 +0000 UTC m=+45.124222124 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.850685 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-catalog-content\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.850817 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-utilities\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.873074 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-tbf9x"] Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.891402 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hmzt\" (UniqueName: \"kubernetes.io/projected/935f64e7-557b-4244-b69e-b0943965db19-kube-api-access-6hmzt\") pod \"certified-operators-wk4hd\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.950736 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.950895 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.450852617 +0000 UTC m=+45.225086052 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.951385 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-utilities\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.951432 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-catalog-content\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.951553 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.952340 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-utilities\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.951731 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq4hn\" (UniqueName: \"kubernetes.io/projected/164595ee-6652-4559-b8dd-7e040aa4602d-kube-api-access-zq4hn\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.956213 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-catalog-content\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:53 crc kubenswrapper[4723]: E1211 15:23:53.956833 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.456801417 +0000 UTC m=+45.231034852 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:53 crc kubenswrapper[4723]: I1211 15:23:53.975239 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq4hn\" (UniqueName: \"kubernetes.io/projected/164595ee-6652-4559-b8dd-7e040aa4602d-kube-api-access-zq4hn\") pod \"community-operators-hqpzz\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.028734 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:54 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:54 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:54 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.029112 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.046663 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" event={"ID":"dd17f73a-62e0-4e24-924c-a449a813f55b","Type":"ContainerStarted","Data":"525e5b9b1f21e9a6b3ab101974088f2beb50430c1a74f12cc0f95d167d08d260"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.047679 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" event={"ID":"a89db749-53e1-4e74-b58b-2f8f4e990d68","Type":"ContainerStarted","Data":"db5074656fa7cc7769efa2dd033463300e90023ffef441106f75b8d825679ffa"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.048901 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" event={"ID":"878f7948-c80f-4d35-82f0-0eb16b515ac8","Type":"ContainerStarted","Data":"427a1e278b9f717bc18cd84cc1f542c4b33e21237fd07c51041013acdca07250"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.050871 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" event={"ID":"2d6cbe0b-45fd-4b5d-9e42-c364e8893035","Type":"ContainerStarted","Data":"9eefe0df709b86622b07d801eee0237447f094ac615eb32e8e28802724153a27"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.053282 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" event={"ID":"816fbc57-2230-443a-979a-42cecc91e498","Type":"ContainerStarted","Data":"e156c4c3bb26a2c598bfd431b3cb9e6c19eed49aba9ecc8f46802e8a239c8fd7"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.053862 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.055404 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dw6bx" event={"ID":"1dae0577-a799-4ba7-9cc2-f6c38436bae4","Type":"ContainerStarted","Data":"8affe628819be9b993fb5937c00cebded00fe418d93d8f54600d29a47c258de9"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.058798 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.059088 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.559070472 +0000 UTC m=+45.333303907 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.059309 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.059728 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.55971428 +0000 UTC m=+45.333947715 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.129266 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.158046 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.161248 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.161435 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.66139599 +0000 UTC m=+45.435629425 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.163269 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.163665 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.66365516 +0000 UTC m=+45.437888595 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.265609 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.265931 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.765881895 +0000 UTC m=+45.540115330 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.266708 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.267217 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.76720157 +0000 UTC m=+45.541435225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.369078 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.369592 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.869555068 +0000 UTC m=+45.643788503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.369687 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.370415 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.870403371 +0000 UTC m=+45.644636806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.472398 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.472865 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.972833661 +0000 UTC m=+45.747067096 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.473042 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.473509 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:54.973496239 +0000 UTC m=+45.747729674 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.575148 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.575371 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.075332043 +0000 UTC m=+45.849565488 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.576023 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.576634 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.076614548 +0000 UTC m=+45.850847993 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.677872 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.678359 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.178339779 +0000 UTC m=+45.952573214 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.679014 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.679940 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.17985919 +0000 UTC m=+45.954092625 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.781639 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.782085 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.282020493 +0000 UTC m=+46.056253928 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.782230 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.847430 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-x4pzx" event={"ID":"9a9fea4f-8828-4ea9-8b0c-a77ed4fbd07f","Type":"ContainerStarted","Data":"617d7b6887aa869ac097f83760a067b3c5448593f2dcfcfca8fd05fcd35eeaad"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849086 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849137 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" event={"ID":"4bd0ccab-0a40-492f-8f39-b451b8c36c1c","Type":"ContainerStarted","Data":"cdfa0940d2fef7b0d78dc33b6dd347e4ef6aeed5b26774f1c4b41c02ba898bdb"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849161 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-w87r2" event={"ID":"4ab833cc-e28e-4c58-8a18-b1891eb69b7f","Type":"ContainerStarted","Data":"49cf4deebb1529edeff390eaf7813c542069c387d3a7a294960453c55348e13d"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849214 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" event={"ID":"166ce132-af47-4b71-930e-bd80549c3d3f","Type":"ContainerStarted","Data":"c984f1bc4d7686c0d175bf250e7790b95941b5dfbe00d894214c1d6bdcaa9dc9"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849227 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" event={"ID":"de1f58e2-b8f5-468e-80f7-6476eefc67f0","Type":"ContainerStarted","Data":"eabedd23c730a859697ab525085029d3d26d1dfc48ca462074a19991f60ba06b"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849251 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" event={"ID":"9f4f87bd-6c70-4163-a655-957bcc992271","Type":"ContainerStarted","Data":"32720bed45c92bad402bd00763124525d97bccd86bcb840ee4d6625e1118672c"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849262 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" 
event={"ID":"4aef2280-346e-448c-90a2-bdb01a0e9b2d","Type":"ContainerStarted","Data":"c4698f036e6fd142a958fca19de8d7973d558cbc269789ffc8b221e72a08c79e"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.849278 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-sjsqg" event={"ID":"3cabbadb-fc14-4253-a767-d153aa9604bc","Type":"ContainerStarted","Data":"7af8b4b73b9ac0926e694526c781d7d472e4ba6f35cff7b3a8c1ce3b23767282"} Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.864918 4723 patch_prober.go:28] interesting pod/console-operator-58897d9998-nxcpx container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/readyz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.865031 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" podUID="4aef2280-346e-448c-90a2-bdb01a0e9b2d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/readyz\": dial tcp 10.217.0.39:8443: connect: connection refused" Dec 11 15:23:54 crc kubenswrapper[4723]: E1211 15:23:54.884806 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.384771591 +0000 UTC m=+46.159005016 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.887555 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.899091 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dsn9s"] Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.900651 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.930758 4723 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-565wv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.930827 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.955568 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dsn9s"] Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.960809 4723 patch_prober.go:28] interesting pod/downloads-7954f5f757-sjsqg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.961364 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sjsqg" podUID="3cabbadb-fc14-4253-a767-d153aa9604bc" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 11 15:23:54 crc kubenswrapper[4723]: I1211 15:23:54.976289 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nf5xs" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:54.985830 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:54.988510 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.488483116 +0000 UTC m=+46.262716551 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:54.999314 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:54.999523 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-n8zjh" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.015773 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" podStartSLOduration=24.015743675 podStartE2EDuration="24.015743675s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:54.063917092 +0000 UTC m=+44.838150527" watchObservedRunningTime="2025-12-11 15:23:55.015743675 +0000 UTC m=+45.789977110" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.043769 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-x4pzx" podStartSLOduration=8.043745924 podStartE2EDuration="8.043745924s" podCreationTimestamp="2025-12-11 15:23:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:54.081117812 +0000 UTC m=+44.855351257" watchObservedRunningTime="2025-12-11 15:23:55.043745924 +0000 UTC m=+45.817979359" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.044793 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-vc9bf" podStartSLOduration=23.044785492 podStartE2EDuration="23.044785492s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:54.10083634 +0000 UTC m=+44.875069785" watchObservedRunningTime="2025-12-11 15:23:55.044785492 +0000 UTC m=+45.819018927" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.060147 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:55 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:55 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:55 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.060210 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.089912 4723 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.090078 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5rvb\" (UniqueName: \"kubernetes.io/projected/55e2838f-2f65-426a-aa56-0ec318cee927-kube-api-access-z5rvb\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.090138 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-utilities\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.090195 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-catalog-content\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.090705 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.59069023 +0000 UTC m=+46.364923665 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.103070 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkmq6" podStartSLOduration=24.103043991 podStartE2EDuration="24.103043991s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:54.12029434 +0000 UTC m=+44.894527775" watchObservedRunningTime="2025-12-11 15:23:55.103043991 +0000 UTC m=+45.877277426" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.153094 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gb8rc"] Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.155229 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.165898 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q4z7j" podStartSLOduration=23.165876421 podStartE2EDuration="23.165876421s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:54.975143819 +0000 UTC m=+45.749377264" watchObservedRunningTime="2025-12-11 15:23:55.165876421 +0000 UTC m=+45.940109856" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.169081 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gb8rc"] Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.180847 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.191339 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9nrqn" podStartSLOduration=24.191310032 podStartE2EDuration="24.191310032s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:55.055291313 +0000 UTC m=+45.829524748" watchObservedRunningTime="2025-12-11 15:23:55.191310032 +0000 UTC m=+45.965543467" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.191808 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.192054 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-utilities\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.192125 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-catalog-content\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.192197 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5rvb\" (UniqueName: \"kubernetes.io/projected/55e2838f-2f65-426a-aa56-0ec318cee927-kube-api-access-z5rvb\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.192701 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:55.692678178 +0000 UTC m=+46.466911613 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.193246 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-utilities\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.193463 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-catalog-content\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.206624 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" podStartSLOduration=24.206597131 podStartE2EDuration="24.206597131s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:55.12395626 +0000 UTC m=+45.898189695" watchObservedRunningTime="2025-12-11 15:23:55.206597131 +0000 UTC m=+45.980830566" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.230335 4723 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-hvds8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" start-of-body= Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.230435 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" podUID="cecdb332-0935-414b-a3f3-c5ee04a211c9" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": dial tcp 10.217.0.24:5443: connect: connection refused" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.273621 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" event={"ID":"cba98b6e-0115-4c3e-801e-23f24e4720d9","Type":"ContainerStarted","Data":"a64c96818f06c222afb4cb38f8773d64e6a1cbe03049320638c1a28136169b8f"} Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.274035 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5rvb\" (UniqueName: \"kubernetes.io/projected/55e2838f-2f65-426a-aa56-0ec318cee927-kube-api-access-z5rvb\") pod \"certified-operators-dsn9s\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.299527 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.304153 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-utilities\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.304282 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-catalog-content\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.304360 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.304450 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdjq4\" (UniqueName: \"kubernetes.io/projected/28807a39-7a71-4a91-8e2d-586ce6a0c451-kube-api-access-mdjq4\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.304868 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.804852169 +0000 UTC m=+46.579085604 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.320690 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.329740 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.369189 4723 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-fr92q container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.32:6443/healthz\": dial tcp 10.217.0.32:6443: connect: connection refused" start-of-body= Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.369268 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" podUID="0008032b-faa4-4c7a-87ea-5ede94bc0229" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.32:6443/healthz\": dial tcp 10.217.0.32:6443: connect: connection refused" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.370276 4723 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7htrb container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.370344 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" podUID="85180921-c4ba-4b06-9240-4d35a5c57248" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.374100 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-px8w4"] Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.392414 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.405756 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.405867 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.905850581 +0000 UTC m=+46.680084016 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.406176 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdjq4\" (UniqueName: \"kubernetes.io/projected/28807a39-7a71-4a91-8e2d-586ce6a0c451-kube-api-access-mdjq4\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.406241 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-utilities\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.406296 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-catalog-content\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.406478 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.410743 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-utilities\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.411647 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-catalog-content\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.412301 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:55.912283583 +0000 UTC m=+46.686517018 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.470602 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.490455 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-px8w4"] Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.519616 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-sjsqg" podStartSLOduration=24.492297534 podStartE2EDuration="24.492297534s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:55.397125838 +0000 UTC m=+46.171359273" watchObservedRunningTime="2025-12-11 15:23:55.492297534 +0000 UTC m=+46.266530969" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.519700 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.520647 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghcnp\" (UniqueName: \"kubernetes.io/projected/812543bf-43e0-49c1-8348-890db4be7090-kube-api-access-ghcnp\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.520854 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-utilities\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.527186 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.027141026 +0000 UTC m=+46.801374461 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.531566 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdjq4\" (UniqueName: \"kubernetes.io/projected/28807a39-7a71-4a91-8e2d-586ce6a0c451-kube-api-access-mdjq4\") pod \"community-operators-gb8rc\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.535985 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-catalog-content\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.536097 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.536726 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.036702002 +0000 UTC m=+46.810935437 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.643699 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-q5pwb" podStartSLOduration=24.643683993 podStartE2EDuration="24.643683993s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:55.593897891 +0000 UTC m=+46.368131326" watchObservedRunningTime="2025-12-11 15:23:55.643683993 +0000 UTC m=+46.417917428" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.645754 4723 patch_prober.go:28] interesting pod/console-operator-58897d9998-nxcpx container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/readyz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.645786 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" podUID="4aef2280-346e-448c-90a2-bdb01a0e9b2d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/readyz\": dial tcp 10.217.0.39:8443: connect: connection refused" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.646581 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" gracePeriod=30 Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.648820 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.649073 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-utilities\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.649109 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-catalog-content\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.649194 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghcnp\" (UniqueName: 
\"kubernetes.io/projected/812543bf-43e0-49c1-8348-890db4be7090-kube-api-access-ghcnp\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.649602 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-utilities\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.649744 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.149719494 +0000 UTC m=+46.923952969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.649985 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-catalog-content\") pod \"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.655787 4723 patch_prober.go:28] interesting pod/downloads-7954f5f757-sjsqg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.655855 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sjsqg" podUID="3cabbadb-fc14-4253-a767-d153aa9604bc" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.687225 4723 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-565wv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.687290 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.691869 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghcnp\" (UniqueName: \"kubernetes.io/projected/812543bf-43e0-49c1-8348-890db4be7090-kube-api-access-ghcnp\") pod 
\"redhat-marketplace-px8w4\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.707722 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.716664 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" podStartSLOduration=24.716641124 podStartE2EDuration="24.716641124s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:55.706193225 +0000 UTC m=+46.480426660" watchObservedRunningTime="2025-12-11 15:23:55.716641124 +0000 UTC m=+46.490874559" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.754448 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" podStartSLOduration=23.754432875 podStartE2EDuration="23.754432875s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:55.753661375 +0000 UTC m=+46.527894810" watchObservedRunningTime="2025-12-11 15:23:55.754432875 +0000 UTC m=+46.528666310" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.754602 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.756213 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.256196873 +0000 UTC m=+47.030430298 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.779930 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nfg4x"] Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.781044 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfg4x"] Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.781078 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wk4hd"] Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.781172 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.862557 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.863239 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.363196795 +0000 UTC m=+47.137430230 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.863290 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-utilities\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.863364 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxmp7\" (UniqueName: \"kubernetes.io/projected/075c6779-3a8e-480d-9d98-27bb4728e95a-kube-api-access-gxmp7\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.863408 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-catalog-content\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.863494 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.863804 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.363788901 +0000 UTC m=+47.138022336 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.912664 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.964861 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.965150 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.465087871 +0000 UTC m=+47.239321306 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.965237 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.965290 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-utilities\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.965356 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxmp7\" (UniqueName: \"kubernetes.io/projected/075c6779-3a8e-480d-9d98-27bb4728e95a-kube-api-access-gxmp7\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.965400 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-catalog-content\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc 
kubenswrapper[4723]: I1211 15:23:55.965988 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-catalog-content\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:55 crc kubenswrapper[4723]: E1211 15:23:55.966355 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.466338034 +0000 UTC m=+47.240571469 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:55 crc kubenswrapper[4723]: I1211 15:23:55.966618 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-utilities\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.034360 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxmp7\" (UniqueName: \"kubernetes.io/projected/075c6779-3a8e-480d-9d98-27bb4728e95a-kube-api-access-gxmp7\") pod \"redhat-marketplace-nfg4x\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.041717 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:56 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:56 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:56 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.041791 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.041929 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" podStartSLOduration=25.041905996 podStartE2EDuration="25.041905996s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:55.942518767 +0000 UTC m=+46.716752212" watchObservedRunningTime="2025-12-11 15:23:56.041905996 +0000 UTC m=+46.816139431" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.067371 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.079663 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.579619155 +0000 UTC m=+47.353852590 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.114287 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-f4qpp" podStartSLOduration=25.114268192 podStartE2EDuration="25.114268192s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:56.113749588 +0000 UTC m=+46.887983023" watchObservedRunningTime="2025-12-11 15:23:56.114268192 +0000 UTC m=+46.888501617" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.183796 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.184004 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" podStartSLOduration=25.183940545 podStartE2EDuration="25.183940545s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:56.166336244 +0000 UTC m=+46.940569679" watchObservedRunningTime="2025-12-11 15:23:56.183940545 +0000 UTC m=+46.958173980" Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.184349 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.684328876 +0000 UTC m=+47.458562311 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.206530 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.262357 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-dw6bx" podStartSLOduration=25.262317352 podStartE2EDuration="25.262317352s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:56.235653119 +0000 UTC m=+47.009886554" watchObservedRunningTime="2025-12-11 15:23:56.262317352 +0000 UTC m=+47.036550787" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.263267 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqpzz"] Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.287367 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.288024 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.788004609 +0000 UTC m=+47.562238044 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.388952 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.389464 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.889448173 +0000 UTC m=+47.663681608 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.483862 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dsn9s"] Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.490654 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.491139 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:56.991117303 +0000 UTC m=+47.765350738 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.597232 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.598117 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.098100765 +0000 UTC m=+47.872334200 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.700884 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.701211 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.201169882 +0000 UTC m=+47.975403317 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.702159 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.708762 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.208737615 +0000 UTC m=+47.982971050 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.731548 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" event={"ID":"b789a0fc-d92b-43f2-bb28-0e522ae80af8","Type":"ContainerStarted","Data":"7e77e7099b763be84cf4765083f4af8ffdbcf31d8d62ae46fd0e924e8e79f28d"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.739631 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" event={"ID":"7b651358-a4e6-40b5-a8db-f4108332e022","Type":"ContainerStarted","Data":"61748d9deb8e647f1a37d70d622d6be0e8830b7b394916d18c1f95ea53f1e163"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.746627 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" event={"ID":"2d6cbe0b-45fd-4b5d-9e42-c364e8893035","Type":"ContainerStarted","Data":"e6ed126aa43ee474f80e7cbb3f7010267a5b26a4aba93175c21b9779ba95506f"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.766194 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpzz" event={"ID":"164595ee-6652-4559-b8dd-7e040aa4602d","Type":"ContainerStarted","Data":"762cb9d78eb716d6043f58d446b1cdb03345926d52b5655274bf367eeb0b3ed7"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.778086 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" event={"ID":"0008032b-faa4-4c7a-87ea-5ede94bc0229","Type":"ContainerStarted","Data":"af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.785695 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kv42r" podStartSLOduration=24.785667173 podStartE2EDuration="24.785667173s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:56.784797269 +0000 UTC m=+47.559030704" watchObservedRunningTime="2025-12-11 15:23:56.785667173 +0000 UTC m=+47.559900608" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.789562 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" event={"ID":"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4","Type":"ContainerStarted","Data":"0e48ea0af82a8b19029b821f65cdf44ab5b026ad700b8a470b4aae711597b509"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.818829 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.820212 4723 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.320190546 +0000 UTC m=+48.094423981 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.828405 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" event={"ID":"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3","Type":"ContainerStarted","Data":"5d7b8c03cad245b32b66680fe42904f92fc8bfaf45a7f95973c3dcffc620eef0"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.858028 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wk4hd" event={"ID":"935f64e7-557b-4244-b69e-b0943965db19","Type":"ContainerStarted","Data":"859e766eb6fb19adfed656f984c0956b9bd1fccfaf8ba20d6a95818c694c0311"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.864043 4723 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.876876 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lsx7q" podStartSLOduration=26.876843582 podStartE2EDuration="26.876843582s" podCreationTimestamp="2025-12-11 15:23:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:56.855991514 +0000 UTC m=+47.630224949" watchObservedRunningTime="2025-12-11 15:23:56.876843582 +0000 UTC m=+47.651077017" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.894726 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" event={"ID":"cecdb332-0935-414b-a3f3-c5ee04a211c9","Type":"ContainerStarted","Data":"afe60ec1b2b0bb6641b250f9a87fbb7bff10f0f5b8f0627acbb737a9b533538d"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.925779 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" event={"ID":"fd2626c4-8f7b-43ab-93c2-f7f535a400b8","Type":"ContainerStarted","Data":"471010e8a896391b4643125799e2ff77b827738b80a11fff0a2e524c366b2b0c"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.926946 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:56 crc kubenswrapper[4723]: E1211 15:23:56.928149 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.428136124 +0000 UTC m=+48.202369559 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.929340 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" podStartSLOduration=25.929321446 podStartE2EDuration="25.929321446s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:56.925420721 +0000 UTC m=+47.699654166" watchObservedRunningTime="2025-12-11 15:23:56.929321446 +0000 UTC m=+47.703554881" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.929726 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" event={"ID":"85180921-c4ba-4b06-9240-4d35a5c57248","Type":"ContainerStarted","Data":"b35bdad23c2ce8e53c5f7904e91da19209d6bc365c2f4998b4db02967b233a64"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.946889 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.951769 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-d78kx" event={"ID":"166ce132-af47-4b71-930e-bd80549c3d3f","Type":"ContainerStarted","Data":"cef63a599e0289c0627e173343bb719725fce3e59d2f27b82c0e24eae7a897ae"} Dec 11 15:23:56 crc kubenswrapper[4723]: I1211 15:23:56.986615 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dsn9s" event={"ID":"55e2838f-2f65-426a-aa56-0ec318cee927","Type":"ContainerStarted","Data":"8696682b5218f8db4a76019f7dbbd3b060429bce2801c7ddfdb716aca5e362f2"} Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.003872 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-s276d" event={"ID":"e014794a-d5a4-4ced-b54e-55130268ffd2","Type":"ContainerStarted","Data":"d5a47efc2f5755a118c3d858cb67b28a533dd097a6b4be946cb3fb92c3e12e89"} Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.005002 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-s276d" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.034664 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.036770 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:57.536748119 +0000 UTC m=+48.310981554 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.037289 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" event={"ID":"dd17f73a-62e0-4e24-924c-a449a813f55b","Type":"ContainerStarted","Data":"fcc5a4b00cf8de2266283eca1283a5b087cc2f2ac641c65fdc4c60a534c19b7c"} Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.037344 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.105747 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-jrjdt" podStartSLOduration=26.105720195 podStartE2EDuration="26.105720195s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:57.105125589 +0000 UTC m=+47.879359034" watchObservedRunningTime="2025-12-11 15:23:57.105720195 +0000 UTC m=+47.879953630" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.109151 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:57 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:57 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:57 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.109222 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.137859 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.143090 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.643066614 +0000 UTC m=+48.417300049 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.192571 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gb8rc"] Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.192724 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-s276d" podStartSLOduration=10.192703832 podStartE2EDuration="10.192703832s" podCreationTimestamp="2025-12-11 15:23:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:57.156034391 +0000 UTC m=+47.930267826" watchObservedRunningTime="2025-12-11 15:23:57.192703832 +0000 UTC m=+47.966937267" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.197208 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-px8w4"] Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.220308 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" podStartSLOduration=25.22029292 podStartE2EDuration="25.22029292s" podCreationTimestamp="2025-12-11 15:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:57.218593074 +0000 UTC m=+47.992826509" watchObservedRunningTime="2025-12-11 15:23:57.22029292 +0000 UTC m=+47.994526345" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.243461 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.243815 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.743798068 +0000 UTC m=+48.518031503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.346031 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.346473 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.846457895 +0000 UTC m=+48.620691330 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.447172 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfg4x"] Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.447233 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.447354 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.947330733 +0000 UTC m=+48.721564168 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.447441 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.447826 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:57.947812246 +0000 UTC m=+48.722045681 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: W1211 15:23:57.453379 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod075c6779_3a8e_480d_9d98_27bb4728e95a.slice/crio-19e7a0871ed5a93aaa530d03479cd69f1783ef3946d3c4ddda80e3483a69ed5c WatchSource:0}: Error finding container 19e7a0871ed5a93aaa530d03479cd69f1783ef3946d3c4ddda80e3483a69ed5c: Status 404 returned error can't find the container with id 19e7a0871ed5a93aaa530d03479cd69f1783ef3946d3c4ddda80e3483a69ed5c Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.537341 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8xn7k"] Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.538593 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.542167 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.548557 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.548744 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:58.048716426 +0000 UTC m=+48.822949871 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.548902 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.549225 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.049214129 +0000 UTC m=+48.823447574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.556302 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8xn7k"] Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.650045 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.650276 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.150240941 +0000 UTC m=+48.924474386 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.650637 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv9gb\" (UniqueName: \"kubernetes.io/projected/52430e74-4768-47fa-966f-09160199d877-kube-api-access-kv9gb\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.650698 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-utilities\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.650735 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.650762 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-catalog-content\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.651213 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.151196057 +0000 UTC m=+48.925429492 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.751922 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.752157 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.252121307 +0000 UTC m=+49.026354742 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.752229 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv9gb\" (UniqueName: \"kubernetes.io/projected/52430e74-4768-47fa-966f-09160199d877-kube-api-access-kv9gb\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.752310 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-utilities\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.752363 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.752386 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-catalog-content\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.752884 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.252785585 +0000 UTC m=+49.027019020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.752937 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-utilities\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.752950 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-catalog-content\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.772316 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv9gb\" (UniqueName: \"kubernetes.io/projected/52430e74-4768-47fa-966f-09160199d877-kube-api-access-kv9gb\") pod \"redhat-operators-8xn7k\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.779429 4723 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-fr92q container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.32:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.779489 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" podUID="0008032b-faa4-4c7a-87ea-5ede94bc0229" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.32:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.853677 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.853864 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.353829938 +0000 UTC m=+49.128063383 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.854122 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.854471 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.354460085 +0000 UTC m=+49.128693560 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.895881 4723 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-hvds8 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.895982 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" podUID="cecdb332-0935-414b-a3f3-c5ee04a211c9" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.24:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.929393 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.930534 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xtk8c"] Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.931866 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.955591 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.955743 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.455726644 +0000 UTC m=+49.229960079 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.955931 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:57 crc kubenswrapper[4723]: E1211 15:23:57.956221 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.456213697 +0000 UTC m=+49.230447132 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:57 crc kubenswrapper[4723]: I1211 15:23:57.976447 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtk8c"] Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.026698 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:58 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:58 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:58 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.026768 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.057292 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.057374 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.557356693 +0000 UTC m=+49.331590128 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.057493 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-catalog-content\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.057515 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m48kj\" (UniqueName: \"kubernetes.io/projected/255ec090-7fde-4cd0-b318-4689a8d9ea0b-kube-api-access-m48kj\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.057537 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.057559 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-utilities\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.057867 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.557860336 +0000 UTC m=+49.332093771 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.158888 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.159072 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.659051163 +0000 UTC m=+49.433284598 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.159223 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-catalog-content\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.159254 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m48kj\" (UniqueName: \"kubernetes.io/projected/255ec090-7fde-4cd0-b318-4689a8d9ea0b-kube-api-access-m48kj\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.159278 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.159312 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-utilities\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.159788 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.659768102 +0000 UTC m=+49.434001537 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.159807 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-catalog-content\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.159878 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-utilities\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.180458 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m48kj\" (UniqueName: \"kubernetes.io/projected/255ec090-7fde-4cd0-b318-4689a8d9ea0b-kube-api-access-m48kj\") pod \"redhat-operators-xtk8c\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.259986 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.260222 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.260392 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.760369013 +0000 UTC m=+49.534602458 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.260747 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.261134 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.761123104 +0000 UTC m=+49.535356539 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.362211 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.362360 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.862334981 +0000 UTC m=+49.636568426 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.362617 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.362917 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.862906577 +0000 UTC m=+49.637140012 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.464488 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.464753 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.96471777 +0000 UTC m=+49.738951205 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.465053 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.465475 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:58.96546697 +0000 UTC m=+49.739700405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.516517 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px8w4" event={"ID":"812543bf-43e0-49c1-8348-890db4be7090","Type":"ContainerStarted","Data":"5bdbf067376bf5d66153e7834f5d1bd15d8bdd98030abf8d3b127991523bf6c3"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.529604 4723 generic.go:334] "Generic (PLEG): container finished" podID="935f64e7-557b-4244-b69e-b0943965db19" containerID="5ce8e12a4c08971cf7920bd15d5ff94555d40350377d1be624a09a210d8a9b46" exitCode=0 Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.529706 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wk4hd" event={"ID":"935f64e7-557b-4244-b69e-b0943965db19","Type":"ContainerDied","Data":"5ce8e12a4c08971cf7920bd15d5ff94555d40350377d1be624a09a210d8a9b46"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.545817 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-tdqkq" event={"ID":"b6cbbbf3-7b03-4a74-82c6-147c26fca1d4","Type":"ContainerStarted","Data":"47a4380cfc8178ef19346d85f424f2bfd22da0528d4613f155551ef5da367199"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.565883 4723 generic.go:334] "Generic (PLEG): container finished" podID="55e2838f-2f65-426a-aa56-0ec318cee927" containerID="6b44e7284c0b4fda649ec69dd8700a1dfbbf3303438e9aa581e40bfae95dd600" exitCode=0 Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.566017 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dsn9s" event={"ID":"55e2838f-2f65-426a-aa56-0ec318cee927","Type":"ContainerDied","Data":"6b44e7284c0b4fda649ec69dd8700a1dfbbf3303438e9aa581e40bfae95dd600"} Dec 11 15:23:58 crc 
kubenswrapper[4723]: I1211 15:23:58.569112 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.569633 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.069613506 +0000 UTC m=+49.843846941 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.572905 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" event={"ID":"b789a0fc-d92b-43f2-bb28-0e522ae80af8","Type":"ContainerStarted","Data":"13d9b46713af3b55d7b6a5e0686aa19d228af9da2bb28383e40d9de14c9c3f1e"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.580839 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfg4x" event={"ID":"075c6779-3a8e-480d-9d98-27bb4728e95a","Type":"ContainerStarted","Data":"19e7a0871ed5a93aaa530d03479cd69f1783ef3946d3c4ddda80e3483a69ed5c"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.599344 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-s276d" event={"ID":"e014794a-d5a4-4ced-b54e-55130268ffd2","Type":"ContainerStarted","Data":"3191b9477a01e0eb99ee4418ee243492eebe57e920d131bbdaebf81897f67328"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.612827 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gb8rc" event={"ID":"28807a39-7a71-4a91-8e2d-586ce6a0c451","Type":"ContainerStarted","Data":"0e9d67e355f126fbd7e3d80ecc1b178f6a90402465a5fed1f0eb0896506c9475"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.631946 4723 generic.go:334] "Generic (PLEG): container finished" podID="164595ee-6652-4559-b8dd-7e040aa4602d" containerID="ed710ae72560d25b24958b457f0cfbfe351b1c7e1fa7f08f3823b69c8d214f83" exitCode=0 Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.632057 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpzz" event={"ID":"164595ee-6652-4559-b8dd-7e040aa4602d","Type":"ContainerDied","Data":"ed710ae72560d25b24958b457f0cfbfe351b1c7e1fa7f08f3823b69c8d214f83"} Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.676349 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:58 crc 
kubenswrapper[4723]: E1211 15:23:58.677919 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.177901103 +0000 UTC m=+49.952134538 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.729238 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-hvds8" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.780764 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.781524 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.281493094 +0000 UTC m=+50.055726539 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.810653 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.858897 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" podStartSLOduration=27.858860904 podStartE2EDuration="27.858860904s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:23:58.797549164 +0000 UTC m=+49.571782619" watchObservedRunningTime="2025-12-11 15:23:58.858860904 +0000 UTC m=+49.633094339" Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.888425 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.888839 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.388820676 +0000 UTC m=+50.163054101 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:58 crc kubenswrapper[4723]: I1211 15:23:58.989488 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:58 crc kubenswrapper[4723]: E1211 15:23:58.990100 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.490076144 +0000 UTC m=+50.264309579 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.036259 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:23:59 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:23:59 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:23:59 crc kubenswrapper[4723]: healthz check failed Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.036339 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.093402 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.093937 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.593924743 +0000 UTC m=+50.368158178 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.200656 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.201253 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.701230972 +0000 UTC m=+50.475464407 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.302332 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.302671 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.802656225 +0000 UTC m=+50.576889660 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.311988 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtk8c"] Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.343817 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.344584 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.351846 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.362594 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.367464 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.404218 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.404490 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 15:23:59.904461979 +0000 UTC m=+50.678695414 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.404952 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.405146 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.405415 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.405862 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:23:59.905846216 +0000 UTC m=+50.680079651 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.447209 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8xn7k"] Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.506290 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.506487 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-11 15:24:00.006457087 +0000 UTC m=+50.780690522 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.506577 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.506646 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.506720 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.506843 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.506930 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.00692228 +0000 UTC m=+50.781155715 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.538636 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.607820 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.608032 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.107999454 +0000 UTC m=+50.882232889 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.608209 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.608526 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.108514478 +0000 UTC m=+50.882747983 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.639416 4723 generic.go:334] "Generic (PLEG): container finished" podID="812543bf-43e0-49c1-8348-890db4be7090" containerID="17bab4659994331b17dd16be8b8c95c07d38d8749f175c81583989b0fdf4a276" exitCode=0 Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.639527 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px8w4" event={"ID":"812543bf-43e0-49c1-8348-890db4be7090","Type":"ContainerDied","Data":"17bab4659994331b17dd16be8b8c95c07d38d8749f175c81583989b0fdf4a276"} Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.642650 4723 generic.go:334] "Generic (PLEG): container finished" podID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerID="b40ede035ac2db2bf1506aa3505b5e63cf2750140418fe2becf158e86f0afe37" exitCode=0 Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.642766 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gb8rc" event={"ID":"28807a39-7a71-4a91-8e2d-586ce6a0c451","Type":"ContainerDied","Data":"b40ede035ac2db2bf1506aa3505b5e63cf2750140418fe2becf158e86f0afe37"} Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.644861 4723 generic.go:334] "Generic (PLEG): container finished" podID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerID="61d52ba4e73d154b781c84ccb608c395538bb9cff24fe28f116dcc552d451535" exitCode=0 Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.644892 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfg4x" event={"ID":"075c6779-3a8e-480d-9d98-27bb4728e95a","Type":"ContainerDied","Data":"61d52ba4e73d154b781c84ccb608c395538bb9cff24fe28f116dcc552d451535"} Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.647396 4723 generic.go:334] "Generic (PLEG): container finished" podID="878f7948-c80f-4d35-82f0-0eb16b515ac8" containerID="427a1e278b9f717bc18cd84cc1f542c4b33e21237fd07c51041013acdca07250" exitCode=0 Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.647490 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" event={"ID":"878f7948-c80f-4d35-82f0-0eb16b515ac8","Type":"ContainerDied","Data":"427a1e278b9f717bc18cd84cc1f542c4b33e21237fd07c51041013acdca07250"} Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.674083 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.709490 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.709957 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.20990987 +0000 UTC m=+50.984143315 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.710780 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.712534 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.21251945 +0000 UTC m=+50.986753095 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.725006 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.730381 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.733792 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.734029 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.746290 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.756767 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.756813 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.812647 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.812870 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.812981 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.813315 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.313269955 +0000 UTC m=+51.087503380 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.917140 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.917236 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.917357 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: E1211 15:23:59.917625 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.417596796 +0000 UTC m=+51.191830401 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.917723 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.945806 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:23:59 crc kubenswrapper[4723]: I1211 15:23:59.988269 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.019987 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.021233 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.521197388 +0000 UTC m=+51.295430873 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.026029 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.031881 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:00 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:00 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:00 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.031942 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.053696 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.127374 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.129641 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.629620278 +0000 UTC m=+51.403853713 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.232789 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.233274 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.733254751 +0000 UTC m=+51.507488196 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.256934 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-nxcpx" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.335927 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.346095 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.846079649 +0000 UTC m=+51.620313084 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.447363 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.448142 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.451112 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:00.951074048 +0000 UTC m=+51.725307483 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.456486 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.458522 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.482395 4723 patch_prober.go:28] interesting pod/apiserver-76f77b778f-5wvnh container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]log ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]etcd ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/generic-apiserver-start-informers ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/max-in-flight-filter ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 11 15:24:00 crc kubenswrapper[4723]: 
[+]poststarthook/image.openshift.io-apiserver-caches ok Dec 11 15:24:00 crc kubenswrapper[4723]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 11 15:24:00 crc kubenswrapper[4723]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/project.openshift.io-projectcache ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 11 15:24:00 crc kubenswrapper[4723]: [-]poststarthook/openshift.io-startinformers failed: reason withheld Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 11 15:24:00 crc kubenswrapper[4723]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 11 15:24:00 crc kubenswrapper[4723]: livez check failed Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.482452 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" podUID="b789a0fc-d92b-43f2-bb28-0e522ae80af8" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:00 crc kubenswrapper[4723]: W1211 15:24:00.524676 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode6fd408f_dfdf_4334_b9fc_3a858899ea44.slice/crio-3ca75c00a9958725819b0e7acaa40ea31f6ff2caea43d30cdcf0889dbdfa3b5d WatchSource:0}: Error finding container 3ca75c00a9958725819b0e7acaa40ea31f6ff2caea43d30cdcf0889dbdfa3b5d: Status 404 returned error can't find the container with id 3ca75c00a9958725819b0e7acaa40ea31f6ff2caea43d30cdcf0889dbdfa3b5d Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.538765 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.539241 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.554845 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.555308 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:01.055291356 +0000 UTC m=+51.829524791 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.562120 4723 patch_prober.go:28] interesting pod/console-f9d7485db-dw6bx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.562195 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dw6bx" podUID="1dae0577-a799-4ba7-9cc2-f6c38436bae4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.660144 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.660573 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.660829 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.660859 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.665091 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:01.165065332 +0000 UTC m=+51.939298767 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.678943 4723 patch_prober.go:28] interesting pod/downloads-7954f5f757-sjsqg container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.679041 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-sjsqg" podUID="3cabbadb-fc14-4253-a767-d153aa9604bc" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.679493 4723 patch_prober.go:28] interesting pod/downloads-7954f5f757-sjsqg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" start-of-body= Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.679510 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sjsqg" podUID="3cabbadb-fc14-4253-a767-d153aa9604bc" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.8:8080/\": dial tcp 10.217.0.8:8080: connect: connection refused" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.688213 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.689570 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.696074 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.764383 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.765041 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:01.265024346 +0000 UTC m=+52.039257781 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.773396 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6fd408f-dfdf-4334-b9fc-3a858899ea44","Type":"ContainerStarted","Data":"3ca75c00a9958725819b0e7acaa40ea31f6ff2caea43d30cdcf0889dbdfa3b5d"} Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.784246 4723 generic.go:334] "Generic (PLEG): container finished" podID="52430e74-4768-47fa-966f-09160199d877" containerID="861aafac588c23dc387507ad7c1c59801f5a448a525ef77bec0d0e383fbf7fb2" exitCode=0 Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.784332 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8xn7k" event={"ID":"52430e74-4768-47fa-966f-09160199d877","Type":"ContainerDied","Data":"861aafac588c23dc387507ad7c1c59801f5a448a525ef77bec0d0e383fbf7fb2"} Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.784369 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8xn7k" event={"ID":"52430e74-4768-47fa-966f-09160199d877","Type":"ContainerStarted","Data":"96b3cc2441963f050436ec515ffa74bade2b249c91508228f968d0c3ea3c71fa"} Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.812203 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.858504 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.859208 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.871596 4723 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.872590 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 
15:24:00.873200 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:01.373162319 +0000 UTC m=+52.147395904 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.880133 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.880671 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" event={"ID":"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3","Type":"ContainerStarted","Data":"9fd2bd257088980f9581cae5e52dc98a9213c9e9e54fd736ab5261b18b882c1f"} Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.894362 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.894506 4723 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.894816 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:24:00 crc kubenswrapper[4723]: W1211 15:24:00.905601 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod2761bf07_c71e_4ea5_9fb4_4a764224bc15.slice/crio-45251082c8caf27416fee25fc78d7544aec76ec1adf6d5d8a3527bbea3351e86 WatchSource:0}: Error finding container 45251082c8caf27416fee25fc78d7544aec76ec1adf6d5d8a3527bbea3351e86: Status 404 returned error can't find the container with id 45251082c8caf27416fee25fc78d7544aec76ec1adf6d5d8a3527bbea3351e86 Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.906452 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.914535 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.953506 4723 generic.go:334] "Generic (PLEG): container finished" podID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerID="cac593226510a5051b74c72d164a9ce8a03cae227303c4f252d13d7db39dd4ea" exitCode=0 Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.955843 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtk8c" event={"ID":"255ec090-7fde-4cd0-b318-4689a8d9ea0b","Type":"ContainerDied","Data":"cac593226510a5051b74c72d164a9ce8a03cae227303c4f252d13d7db39dd4ea"} Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.955902 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtk8c" event={"ID":"255ec090-7fde-4cd0-b318-4689a8d9ea0b","Type":"ContainerStarted","Data":"0e9d8ce295e080af93acbfeff5dbbe6c7ca56bf04e2b96160da9c73a09192912"} Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.965136 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 11 15:24:00 crc kubenswrapper[4723]: I1211 15:24:00.974692 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:00 crc kubenswrapper[4723]: E1211 15:24:00.975217 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 15:24:01.475198119 +0000 UTC m=+52.249431554 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.047810 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:01 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:01 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:01 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.047878 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.100501 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:24:01 crc kubenswrapper[4723]: E1211 15:24:01.108333 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 15:24:01.60830709 +0000 UTC m=+52.382540525 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.200910 4723 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-11T15:24:00.871637548Z","Handler":null,"Name":""} Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.203745 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:01 crc kubenswrapper[4723]: E1211 15:24:01.204168 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-11 15:24:01.704148964 +0000 UTC m=+52.478382399 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w957v" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.228420 4723 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.228478 4723 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.307842 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.317822 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.410497 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.416298 4723 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.416349 4723 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.602856 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.711624 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.778840 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=1.778816127 podStartE2EDuration="1.778816127s" podCreationTimestamp="2025-12-11 15:24:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:24:01.772380345 +0000 UTC m=+52.546613810" watchObservedRunningTime="2025-12-11 15:24:01.778816127 +0000 UTC m=+52.553049562" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.793184 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w957v\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.827013 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prq97\" (UniqueName: \"kubernetes.io/projected/878f7948-c80f-4d35-82f0-0eb16b515ac8-kube-api-access-prq97\") pod \"878f7948-c80f-4d35-82f0-0eb16b515ac8\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.828765 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/878f7948-c80f-4d35-82f0-0eb16b515ac8-config-volume\") pod \"878f7948-c80f-4d35-82f0-0eb16b515ac8\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.828851 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/878f7948-c80f-4d35-82f0-0eb16b515ac8-secret-volume\") pod \"878f7948-c80f-4d35-82f0-0eb16b515ac8\" (UID: \"878f7948-c80f-4d35-82f0-0eb16b515ac8\") " Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.831292 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/878f7948-c80f-4d35-82f0-0eb16b515ac8-config-volume" (OuterVolumeSpecName: "config-volume") pod "878f7948-c80f-4d35-82f0-0eb16b515ac8" (UID: "878f7948-c80f-4d35-82f0-0eb16b515ac8"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.845508 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/878f7948-c80f-4d35-82f0-0eb16b515ac8-kube-api-access-prq97" (OuterVolumeSpecName: "kube-api-access-prq97") pod "878f7948-c80f-4d35-82f0-0eb16b515ac8" (UID: "878f7948-c80f-4d35-82f0-0eb16b515ac8"). InnerVolumeSpecName "kube-api-access-prq97". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.845612 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878f7948-c80f-4d35-82f0-0eb16b515ac8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "878f7948-c80f-4d35-82f0-0eb16b515ac8" (UID: "878f7948-c80f-4d35-82f0-0eb16b515ac8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.864085 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:01 crc kubenswrapper[4723]: W1211 15:24:01.877697 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-7c21152f5e283901f22055e0fb42de88c58f41423bc6cf9c11ac7b18b48248b2 WatchSource:0}: Error finding container 7c21152f5e283901f22055e0fb42de88c58f41423bc6cf9c11ac7b18b48248b2: Status 404 returned error can't find the container with id 7c21152f5e283901f22055e0fb42de88c58f41423bc6cf9c11ac7b18b48248b2 Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.932343 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prq97\" (UniqueName: \"kubernetes.io/projected/878f7948-c80f-4d35-82f0-0eb16b515ac8-kube-api-access-prq97\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.932383 4723 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/878f7948-c80f-4d35-82f0-0eb16b515ac8-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:01 crc kubenswrapper[4723]: I1211 15:24:01.932397 4723 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/878f7948-c80f-4d35-82f0-0eb16b515ac8-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:01 crc kubenswrapper[4723]: W1211 15:24:01.956834 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-ec0476af1d2a57fca0ddf41c08f966dc7e3e911be5ae5f5128f1bc24df110d84 WatchSource:0}: Error finding container ec0476af1d2a57fca0ddf41c08f966dc7e3e911be5ae5f5128f1bc24df110d84: Status 404 returned error can't find the container with id ec0476af1d2a57fca0ddf41c08f966dc7e3e911be5ae5f5128f1bc24df110d84 Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.012839 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.012787 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424435-z2fjq" event={"ID":"878f7948-c80f-4d35-82f0-0eb16b515ac8","Type":"ContainerDied","Data":"842140101cd610efd1790b0667a747dd249ddf3802da7010eae4d9ec8c57404c"} Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.012942 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="842140101cd610efd1790b0667a747dd249ddf3802da7010eae4d9ec8c57404c" Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.029006 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:02 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:02 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:02 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.029048 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.044431 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" event={"ID":"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3","Type":"ContainerStarted","Data":"786e8a864e733f538f9f71e24ebc79dcb39955eb3c157ee5b260a4430afa0965"} Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.049598 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d95ef9b79634a78efbc20295f0985b7703d62d56b2b18860d9248b2c6c75da8c"} Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.051360 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"7c21152f5e283901f22055e0fb42de88c58f41423bc6cf9c11ac7b18b48248b2"} Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.052442 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2761bf07-c71e-4ea5-9fb4-4a764224bc15","Type":"ContainerStarted","Data":"45251082c8caf27416fee25fc78d7544aec76ec1adf6d5d8a3527bbea3351e86"} Dec 11 15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.373377 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w957v"] Dec 11 15:24:02 crc kubenswrapper[4723]: W1211 15:24:02.430445 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c1c36ea_a0e0_4973_a356_e069f2eb2d4f.slice/crio-a807a4148057781202971bf3fcbdfccd310d3709a91870851ac3152dd9e73b54 WatchSource:0}: Error finding container a807a4148057781202971bf3fcbdfccd310d3709a91870851ac3152dd9e73b54: Status 404 returned error can't find the container with id a807a4148057781202971bf3fcbdfccd310d3709a91870851ac3152dd9e73b54 Dec 11 
15:24:02 crc kubenswrapper[4723]: I1211 15:24:02.614025 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.029636 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:03 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:03 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:03 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.029763 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.067043 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6fd408f-dfdf-4334-b9fc-3a858899ea44","Type":"ContainerStarted","Data":"99405626190caec9b76fe8bd9c9d11139f7e704ef07ff2e99774f7411dc66d36"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.076107 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7dee62131aaeeeda12f5b2dc869d3369041fd1664231b55396b22f2f92884a04"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.076253 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.082407 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a8ec4c83077cc73cd7103df2a90ee3f66af976404fbc02249b56d5a71c075285"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.095018 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.094988536 podStartE2EDuration="4.094988536s" podCreationTimestamp="2025-12-11 15:23:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:24:03.094485283 +0000 UTC m=+53.868718738" watchObservedRunningTime="2025-12-11 15:24:03.094988536 +0000 UTC m=+53.869221971" Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.105350 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2761bf07-c71e-4ea5-9fb4-4a764224bc15","Type":"ContainerStarted","Data":"9b74104ef7c62f8bae1eac46ab0969e025b0c7337ee9f10ad0ced44dfbf9f5b9"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.125800 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" event={"ID":"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f","Type":"ContainerStarted","Data":"a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.125852 4723 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" event={"ID":"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f","Type":"ContainerStarted","Data":"a807a4148057781202971bf3fcbdfccd310d3709a91870851ac3152dd9e73b54"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.126673 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.185028 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" podStartSLOduration=32.185001654 podStartE2EDuration="32.185001654s" podCreationTimestamp="2025-12-11 15:23:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:24:03.161091834 +0000 UTC m=+53.935325279" watchObservedRunningTime="2025-12-11 15:24:03.185001654 +0000 UTC m=+53.959235089" Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.188710 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.188694493 podStartE2EDuration="4.188694493s" podCreationTimestamp="2025-12-11 15:23:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:24:03.183790732 +0000 UTC m=+53.958024167" watchObservedRunningTime="2025-12-11 15:24:03.188694493 +0000 UTC m=+53.962927928" Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.193046 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" event={"ID":"a973b8a6-f5a5-4347-b2cf-ed1ff60f05f3","Type":"ContainerStarted","Data":"acedda22ee3af324308bb8e661c6866edb67d5b8699bdeb1ea654b4ecb14f75e"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.197012 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"848c3cf088af094f939880c57e9bc1552f32ae6bbc7dd8f682ee5e14cb341f3f"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.197073 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ec0476af1d2a57fca0ddf41c08f966dc7e3e911be5ae5f5128f1bc24df110d84"} Dec 11 15:24:03 crc kubenswrapper[4723]: I1211 15:24:03.234334 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-qm4lg" podStartSLOduration=16.234304523 podStartE2EDuration="16.234304523s" podCreationTimestamp="2025-12-11 15:23:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:24:03.223560016 +0000 UTC m=+53.997793461" watchObservedRunningTime="2025-12-11 15:24:03.234304523 +0000 UTC m=+54.008537958" Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.028990 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:04 crc kubenswrapper[4723]: [-]has-synced failed: 
reason withheld Dec 11 15:24:04 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:04 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.029050 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.233196 4723 generic.go:334] "Generic (PLEG): container finished" podID="e6fd408f-dfdf-4334-b9fc-3a858899ea44" containerID="99405626190caec9b76fe8bd9c9d11139f7e704ef07ff2e99774f7411dc66d36" exitCode=0 Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.233748 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6fd408f-dfdf-4334-b9fc-3a858899ea44","Type":"ContainerDied","Data":"99405626190caec9b76fe8bd9c9d11139f7e704ef07ff2e99774f7411dc66d36"} Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.249776 4723 generic.go:334] "Generic (PLEG): container finished" podID="2761bf07-c71e-4ea5-9fb4-4a764224bc15" containerID="9b74104ef7c62f8bae1eac46ab0969e025b0c7337ee9f10ad0ced44dfbf9f5b9" exitCode=0 Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.249847 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2761bf07-c71e-4ea5-9fb4-4a764224bc15","Type":"ContainerDied","Data":"9b74104ef7c62f8bae1eac46ab0969e025b0c7337ee9f10ad0ced44dfbf9f5b9"} Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.758812 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:24:04 crc kubenswrapper[4723]: I1211 15:24:04.771353 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-5wvnh" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.026292 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:05 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:05 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:05 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.026363 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.606283 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.691454 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.734503 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kube-api-access\") pod \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.734605 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kubelet-dir\") pod \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\" (UID: \"2761bf07-c71e-4ea5-9fb4-4a764224bc15\") " Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.734866 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2761bf07-c71e-4ea5-9fb4-4a764224bc15" (UID: "2761bf07-c71e-4ea5-9fb4-4a764224bc15"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.774112 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2761bf07-c71e-4ea5-9fb4-4a764224bc15" (UID: "2761bf07-c71e-4ea5-9fb4-4a764224bc15"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.805806 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-s276d" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.845672 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e6fd408f-dfdf-4334-b9fc-3a858899ea44" (UID: "e6fd408f-dfdf-4334-b9fc-3a858899ea44"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.851127 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kube-api-access\") pod \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.851296 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kubelet-dir\") pod \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\" (UID: \"e6fd408f-dfdf-4334-b9fc-3a858899ea44\") " Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.851574 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e6fd408f-dfdf-4334-b9fc-3a858899ea44" (UID: "e6fd408f-dfdf-4334-b9fc-3a858899ea44"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.851930 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.851944 4723 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.851954 4723 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2761bf07-c71e-4ea5-9fb4-4a764224bc15-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:05 crc kubenswrapper[4723]: I1211 15:24:05.851984 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e6fd408f-dfdf-4334-b9fc-3a858899ea44-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.028296 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:06 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:06 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:06 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.028365 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.286229 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.286220 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e6fd408f-dfdf-4334-b9fc-3a858899ea44","Type":"ContainerDied","Data":"3ca75c00a9958725819b0e7acaa40ea31f6ff2caea43d30cdcf0889dbdfa3b5d"} Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.286734 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ca75c00a9958725819b0e7acaa40ea31f6ff2caea43d30cdcf0889dbdfa3b5d" Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.293941 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2761bf07-c71e-4ea5-9fb4-4a764224bc15","Type":"ContainerDied","Data":"45251082c8caf27416fee25fc78d7544aec76ec1adf6d5d8a3527bbea3351e86"} Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.294011 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45251082c8caf27416fee25fc78d7544aec76ec1adf6d5d8a3527bbea3351e86" Dec 11 15:24:06 crc kubenswrapper[4723]: I1211 15:24:06.294089 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 15:24:07 crc kubenswrapper[4723]: I1211 15:24:07.027407 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:07 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:07 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:07 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:07 crc kubenswrapper[4723]: I1211 15:24:07.027472 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:08 crc kubenswrapper[4723]: I1211 15:24:08.027672 4723 patch_prober.go:28] interesting pod/router-default-5444994796-hw8r7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 15:24:08 crc kubenswrapper[4723]: [-]has-synced failed: reason withheld Dec 11 15:24:08 crc kubenswrapper[4723]: [+]process-running ok Dec 11 15:24:08 crc kubenswrapper[4723]: healthz check failed Dec 11 15:24:08 crc kubenswrapper[4723]: I1211 15:24:08.027774 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hw8r7" podUID="65e96d17-3f27-42cb-a6cc-b911057378ab" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 15:24:09 crc kubenswrapper[4723]: I1211 15:24:09.105339 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:24:09 crc kubenswrapper[4723]: I1211 15:24:09.109041 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-hw8r7" Dec 11 15:24:10 crc kubenswrapper[4723]: I1211 15:24:10.535448 4723 patch_prober.go:28] interesting pod/console-f9d7485db-dw6bx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Dec 11 15:24:10 crc kubenswrapper[4723]: I1211 15:24:10.536040 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dw6bx" podUID="1dae0577-a799-4ba7-9cc2-f6c38436bae4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Dec 11 15:24:10 crc kubenswrapper[4723]: I1211 15:24:10.699433 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-sjsqg" Dec 11 15:24:10 crc kubenswrapper[4723]: E1211 15:24:10.831923 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:10 crc kubenswrapper[4723]: E1211 15:24:10.836837 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = 
command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:10 crc kubenswrapper[4723]: E1211 15:24:10.838813 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:10 crc kubenswrapper[4723]: E1211 15:24:10.838855 4723 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:24:20 crc kubenswrapper[4723]: I1211 15:24:20.540587 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:24:20 crc kubenswrapper[4723]: I1211 15:24:20.545298 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-dw6bx" Dec 11 15:24:20 crc kubenswrapper[4723]: E1211 15:24:20.827350 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:20 crc kubenswrapper[4723]: E1211 15:24:20.833961 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:20 crc kubenswrapper[4723]: E1211 15:24:20.835218 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:20 crc kubenswrapper[4723]: E1211 15:24:20.835264 4723 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:24:21 crc kubenswrapper[4723]: I1211 15:24:21.571747 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 11 15:24:21 crc kubenswrapper[4723]: I1211 15:24:21.870017 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:24:21 crc kubenswrapper[4723]: I1211 15:24:21.883992 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
podStartSLOduration=0.883949613 podStartE2EDuration="883.949613ms" podCreationTimestamp="2025-12-11 15:24:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:24:21.880516709 +0000 UTC m=+72.654750154" watchObservedRunningTime="2025-12-11 15:24:21.883949613 +0000 UTC m=+72.658183048" Dec 11 15:24:27 crc kubenswrapper[4723]: I1211 15:24:27.451052 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-tbf9x_b9cb7bb8-2613-4876-aa27-39d58db1aae1/kube-multus-additional-cni-plugins/0.log" Dec 11 15:24:27 crc kubenswrapper[4723]: I1211 15:24:27.451159 4723 generic.go:334] "Generic (PLEG): container finished" podID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" exitCode=137 Dec 11 15:24:27 crc kubenswrapper[4723]: I1211 15:24:27.451214 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" event={"ID":"b9cb7bb8-2613-4876-aa27-39d58db1aae1","Type":"ContainerDied","Data":"e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875"} Dec 11 15:24:30 crc kubenswrapper[4723]: I1211 15:24:30.110459 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-kq5tw" Dec 11 15:24:30 crc kubenswrapper[4723]: E1211 15:24:30.825765 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:30 crc kubenswrapper[4723]: E1211 15:24:30.826384 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:30 crc kubenswrapper[4723]: E1211 15:24:30.827076 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:30 crc kubenswrapper[4723]: E1211 15:24:30.827116 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.931525 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 15:24:33 crc kubenswrapper[4723]: E1211 15:24:33.931811 4723 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2761bf07-c71e-4ea5-9fb4-4a764224bc15" containerName="pruner" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.931826 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="2761bf07-c71e-4ea5-9fb4-4a764224bc15" containerName="pruner" Dec 11 15:24:33 crc kubenswrapper[4723]: E1211 15:24:33.931838 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="878f7948-c80f-4d35-82f0-0eb16b515ac8" containerName="collect-profiles" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.931845 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="878f7948-c80f-4d35-82f0-0eb16b515ac8" containerName="collect-profiles" Dec 11 15:24:33 crc kubenswrapper[4723]: E1211 15:24:33.931860 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6fd408f-dfdf-4334-b9fc-3a858899ea44" containerName="pruner" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.931867 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6fd408f-dfdf-4334-b9fc-3a858899ea44" containerName="pruner" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.932016 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6fd408f-dfdf-4334-b9fc-3a858899ea44" containerName="pruner" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.932030 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="878f7948-c80f-4d35-82f0-0eb16b515ac8" containerName="collect-profiles" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.932039 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="2761bf07-c71e-4ea5-9fb4-4a764224bc15" containerName="pruner" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.932571 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.935389 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.935582 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.941656 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.987632 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782a1e65-eeb8-4f28-8177-990863f34535-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:33 crc kubenswrapper[4723]: I1211 15:24:33.987683 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782a1e65-eeb8-4f28-8177-990863f34535-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:34 crc kubenswrapper[4723]: I1211 15:24:34.089116 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782a1e65-eeb8-4f28-8177-990863f34535-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " 
pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:34 crc kubenswrapper[4723]: I1211 15:24:34.089183 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782a1e65-eeb8-4f28-8177-990863f34535-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:34 crc kubenswrapper[4723]: I1211 15:24:34.089297 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782a1e65-eeb8-4f28-8177-990863f34535-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:34 crc kubenswrapper[4723]: I1211 15:24:34.110487 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782a1e65-eeb8-4f28-8177-990863f34535-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:34 crc kubenswrapper[4723]: I1211 15:24:34.258816 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.529808 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.531275 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.548866 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.680208 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-var-lock\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.680490 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-kubelet-dir\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.680621 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aaa5fc4f-966f-4bfa-9801-8de33c422283-kube-api-access\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.781749 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-var-lock\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.781840 4723 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-kubelet-dir\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.781870 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aaa5fc4f-966f-4bfa-9801-8de33c422283-kube-api-access\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.781890 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-var-lock\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.782020 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-kubelet-dir\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.801919 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aaa5fc4f-966f-4bfa-9801-8de33c422283-kube-api-access\") pod \"installer-9-crc\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:38 crc kubenswrapper[4723]: I1211 15:24:38.865565 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:24:40 crc kubenswrapper[4723]: E1211 15:24:40.825462 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:40 crc kubenswrapper[4723]: E1211 15:24:40.826200 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:40 crc kubenswrapper[4723]: E1211 15:24:40.827161 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:40 crc kubenswrapper[4723]: E1211 15:24:40.827253 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:24:41 crc kubenswrapper[4723]: I1211 15:24:41.614449 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 15:24:50 crc kubenswrapper[4723]: E1211 15:24:50.826415 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:50 crc kubenswrapper[4723]: E1211 15:24:50.827520 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:50 crc kubenswrapper[4723]: E1211 15:24:50.828455 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 15:24:50 crc kubenswrapper[4723]: E1211 15:24:50.828495 4723 
prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875 is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.132361 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-tbf9x_b9cb7bb8-2613-4876-aa27-39d58db1aae1/kube-multus-additional-cni-plugins/0.log" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.133070 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:24:58 crc kubenswrapper[4723]: E1211 15:24:58.242482 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 15:24:58 crc kubenswrapper[4723]: E1211 15:24:58.242762 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6hmzt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-wk4hd_openshift-marketplace(935f64e7-557b-4244-b69e-b0943965db19): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:24:58 crc kubenswrapper[4723]: E1211 15:24:58.247231 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wk4hd" podUID="935f64e7-557b-4244-b69e-b0943965db19" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.274900 
4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9cb7bb8-2613-4876-aa27-39d58db1aae1-cni-sysctl-allowlist\") pod \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.275009 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/b9cb7bb8-2613-4876-aa27-39d58db1aae1-ready\") pod \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.275034 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frp2j\" (UniqueName: \"kubernetes.io/projected/b9cb7bb8-2613-4876-aa27-39d58db1aae1-kube-api-access-frp2j\") pod \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.275174 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9cb7bb8-2613-4876-aa27-39d58db1aae1-tuning-conf-dir\") pod \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\" (UID: \"b9cb7bb8-2613-4876-aa27-39d58db1aae1\") " Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.275419 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b9cb7bb8-2613-4876-aa27-39d58db1aae1-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "b9cb7bb8-2613-4876-aa27-39d58db1aae1" (UID: "b9cb7bb8-2613-4876-aa27-39d58db1aae1"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.276001 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9cb7bb8-2613-4876-aa27-39d58db1aae1-ready" (OuterVolumeSpecName: "ready") pod "b9cb7bb8-2613-4876-aa27-39d58db1aae1" (UID: "b9cb7bb8-2613-4876-aa27-39d58db1aae1"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.276012 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9cb7bb8-2613-4876-aa27-39d58db1aae1-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "b9cb7bb8-2613-4876-aa27-39d58db1aae1" (UID: "b9cb7bb8-2613-4876-aa27-39d58db1aae1"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.281799 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9cb7bb8-2613-4876-aa27-39d58db1aae1-kube-api-access-frp2j" (OuterVolumeSpecName: "kube-api-access-frp2j") pod "b9cb7bb8-2613-4876-aa27-39d58db1aae1" (UID: "b9cb7bb8-2613-4876-aa27-39d58db1aae1"). InnerVolumeSpecName "kube-api-access-frp2j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.376913 4723 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b9cb7bb8-2613-4876-aa27-39d58db1aae1-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.376952 4723 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b9cb7bb8-2613-4876-aa27-39d58db1aae1-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.376979 4723 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/b9cb7bb8-2613-4876-aa27-39d58db1aae1-ready\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.376988 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frp2j\" (UniqueName: \"kubernetes.io/projected/b9cb7bb8-2613-4876-aa27-39d58db1aae1-kube-api-access-frp2j\") on node \"crc\" DevicePath \"\"" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.627800 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-tbf9x_b9cb7bb8-2613-4876-aa27-39d58db1aae1/kube-multus-additional-cni-plugins/0.log" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.627950 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" event={"ID":"b9cb7bb8-2613-4876-aa27-39d58db1aae1","Type":"ContainerDied","Data":"0d92f62263746c0fde935b76e0b81f6a78267251be83ddd675f8389bcccda725"} Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.628014 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-tbf9x" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.628053 4723 scope.go:117] "RemoveContainer" containerID="e10ef44ec2e42811915780fb8ac93f933c1bd2eaa713d7fd5eb688eb7986b875" Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.665221 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-tbf9x"] Dec 11 15:24:58 crc kubenswrapper[4723]: I1211 15:24:58.668557 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-tbf9x"] Dec 11 15:24:59 crc kubenswrapper[4723]: I1211 15:24:59.556651 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" path="/var/lib/kubelet/pods/b9cb7bb8-2613-4876-aa27-39d58db1aae1/volumes" Dec 11 15:25:08 crc kubenswrapper[4723]: E1211 15:25:08.193444 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wk4hd" podUID="935f64e7-557b-4244-b69e-b0943965db19" Dec 11 15:25:08 crc kubenswrapper[4723]: E1211 15:25:08.339847 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 15:25:08 crc kubenswrapper[4723]: E1211 15:25:08.340307 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z5rvb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dsn9s_openshift-marketplace(55e2838f-2f65-426a-aa56-0ec318cee927): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:25:08 crc kubenswrapper[4723]: E1211 15:25:08.341507 4723 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dsn9s" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" Dec 11 15:25:28 crc kubenswrapper[4723]: E1211 15:25:28.953682 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 15:25:28 crc kubenswrapper[4723]: E1211 15:25:28.954567 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mdjq4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gb8rc_openshift-marketplace(28807a39-7a71-4a91-8e2d-586ce6a0c451): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:25:28 crc kubenswrapper[4723]: E1211 15:25:28.955852 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gb8rc" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" Dec 11 15:25:32 crc kubenswrapper[4723]: E1211 15:25:32.283611 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gb8rc" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" Dec 11 15:25:32 crc kubenswrapper[4723]: E1211 15:25:32.384040 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: 
context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 15:25:32 crc kubenswrapper[4723]: E1211 15:25:32.384622 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m48kj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xtk8c_openshift-marketplace(255ec090-7fde-4cd0-b318-4689a8d9ea0b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:25:32 crc kubenswrapper[4723]: E1211 15:25:32.386419 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xtk8c" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" Dec 11 15:25:32 crc kubenswrapper[4723]: E1211 15:25:32.493258 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 15:25:32 crc kubenswrapper[4723]: E1211 15:25:32.493421 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zq4hn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-hqpzz_openshift-marketplace(164595ee-6652-4559-b8dd-7e040aa4602d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:25:32 crc kubenswrapper[4723]: E1211 15:25:32.494610 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-hqpzz" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" Dec 11 15:25:33 crc kubenswrapper[4723]: E1211 15:25:33.729796 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 15:25:33 crc kubenswrapper[4723]: E1211 15:25:33.730186 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kv9gb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-8xn7k_openshift-marketplace(52430e74-4768-47fa-966f-09160199d877): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:25:33 crc kubenswrapper[4723]: E1211 15:25:33.731705 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-8xn7k" podUID="52430e74-4768-47fa-966f-09160199d877" Dec 11 15:25:34 crc kubenswrapper[4723]: E1211 15:25:34.345502 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-hqpzz" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" Dec 11 15:25:34 crc kubenswrapper[4723]: E1211 15:25:34.346021 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xtk8c" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" Dec 11 15:25:34 crc kubenswrapper[4723]: E1211 15:25:34.346178 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-8xn7k" podUID="52430e74-4768-47fa-966f-09160199d877" Dec 11 15:25:34 crc kubenswrapper[4723]: I1211 15:25:34.769009 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 15:25:34 crc kubenswrapper[4723]: I1211 15:25:34.790500 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 15:25:34 crc kubenswrapper[4723]: I1211 15:25:34.898504 4723 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"aaa5fc4f-966f-4bfa-9801-8de33c422283","Type":"ContainerStarted","Data":"e77e37277d035972ff31e8c6b454d179a462e27adae93129dc55c7936860c29c"} Dec 11 15:25:34 crc kubenswrapper[4723]: I1211 15:25:34.901884 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"782a1e65-eeb8-4f28-8177-990863f34535","Type":"ContainerStarted","Data":"0a4a6c4944a47ab703928e6fe19cae082391fe06a46df2aa8c13d60876be0bc6"} Dec 11 15:25:35 crc kubenswrapper[4723]: E1211 15:25:35.104582 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 15:25:35 crc kubenswrapper[4723]: E1211 15:25:35.105177 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gxmp7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-nfg4x_openshift-marketplace(075c6779-3a8e-480d-9d98-27bb4728e95a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:25:35 crc kubenswrapper[4723]: E1211 15:25:35.107342 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-nfg4x" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" Dec 11 15:25:35 crc kubenswrapper[4723]: E1211 15:25:35.538770 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 15:25:35 crc kubenswrapper[4723]: E1211 
15:25:35.539011 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ghcnp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-px8w4_openshift-marketplace(812543bf-43e0-49c1-8348-890db4be7090): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:25:35 crc kubenswrapper[4723]: E1211 15:25:35.540662 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-px8w4" podUID="812543bf-43e0-49c1-8348-890db4be7090" Dec 11 15:25:35 crc kubenswrapper[4723]: I1211 15:25:35.909337 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"782a1e65-eeb8-4f28-8177-990863f34535","Type":"ContainerStarted","Data":"c62c20f5a5d46e90f86017364c7bb2235c39169dc89a6a04eb14af7ee85cde2f"} Dec 11 15:25:35 crc kubenswrapper[4723]: I1211 15:25:35.911845 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"aaa5fc4f-966f-4bfa-9801-8de33c422283","Type":"ContainerStarted","Data":"aa1d478832be5d27c2a1f2e967ae45a56b4a78ecb21e100e950e4174e2d84f26"} Dec 11 15:25:35 crc kubenswrapper[4723]: I1211 15:25:35.928507 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=62.928487251 podStartE2EDuration="1m2.928487251s" podCreationTimestamp="2025-12-11 15:24:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:25:35.926879385 +0000 UTC m=+146.701112820" watchObservedRunningTime="2025-12-11 15:25:35.928487251 +0000 UTC m=+146.702720686" Dec 11 15:25:35 crc kubenswrapper[4723]: I1211 
15:25:35.946660 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=57.946645238 podStartE2EDuration="57.946645238s" podCreationTimestamp="2025-12-11 15:24:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:25:35.944416755 +0000 UTC m=+146.718650200" watchObservedRunningTime="2025-12-11 15:25:35.946645238 +0000 UTC m=+146.720878673" Dec 11 15:25:37 crc kubenswrapper[4723]: I1211 15:25:37.961469 4723 generic.go:334] "Generic (PLEG): container finished" podID="782a1e65-eeb8-4f28-8177-990863f34535" containerID="c62c20f5a5d46e90f86017364c7bb2235c39169dc89a6a04eb14af7ee85cde2f" exitCode=0 Dec 11 15:25:37 crc kubenswrapper[4723]: I1211 15:25:37.961539 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"782a1e65-eeb8-4f28-8177-990863f34535","Type":"ContainerDied","Data":"c62c20f5a5d46e90f86017364c7bb2235c39169dc89a6a04eb14af7ee85cde2f"} Dec 11 15:25:43 crc kubenswrapper[4723]: I1211 15:25:43.745833 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:25:43 crc kubenswrapper[4723]: I1211 15:25:43.746738 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:25:45 crc kubenswrapper[4723]: E1211 15:25:45.304608 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-px8w4" podUID="812543bf-43e0-49c1-8348-890db4be7090" Dec 11 15:25:45 crc kubenswrapper[4723]: I1211 15:25:45.352290 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:25:45 crc kubenswrapper[4723]: I1211 15:25:45.483765 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782a1e65-eeb8-4f28-8177-990863f34535-kubelet-dir\") pod \"782a1e65-eeb8-4f28-8177-990863f34535\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " Dec 11 15:25:45 crc kubenswrapper[4723]: I1211 15:25:45.483957 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782a1e65-eeb8-4f28-8177-990863f34535-kube-api-access\") pod \"782a1e65-eeb8-4f28-8177-990863f34535\" (UID: \"782a1e65-eeb8-4f28-8177-990863f34535\") " Dec 11 15:25:45 crc kubenswrapper[4723]: I1211 15:25:45.484298 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/782a1e65-eeb8-4f28-8177-990863f34535-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "782a1e65-eeb8-4f28-8177-990863f34535" (UID: "782a1e65-eeb8-4f28-8177-990863f34535"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:25:45 crc kubenswrapper[4723]: I1211 15:25:45.484726 4723 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/782a1e65-eeb8-4f28-8177-990863f34535-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:25:45 crc kubenswrapper[4723]: I1211 15:25:45.491124 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/782a1e65-eeb8-4f28-8177-990863f34535-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "782a1e65-eeb8-4f28-8177-990863f34535" (UID: "782a1e65-eeb8-4f28-8177-990863f34535"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:25:45 crc kubenswrapper[4723]: I1211 15:25:45.587654 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/782a1e65-eeb8-4f28-8177-990863f34535-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:25:46 crc kubenswrapper[4723]: I1211 15:25:46.012061 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dsn9s" event={"ID":"55e2838f-2f65-426a-aa56-0ec318cee927","Type":"ContainerStarted","Data":"a63899336bacf471dcecdbaf9ffe11b121c38de2be4479613ff652a56783c5f6"} Dec 11 15:25:46 crc kubenswrapper[4723]: I1211 15:25:46.014855 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"782a1e65-eeb8-4f28-8177-990863f34535","Type":"ContainerDied","Data":"0a4a6c4944a47ab703928e6fe19cae082391fe06a46df2aa8c13d60876be0bc6"} Dec 11 15:25:46 crc kubenswrapper[4723]: I1211 15:25:46.014883 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a4a6c4944a47ab703928e6fe19cae082391fe06a46df2aa8c13d60876be0bc6" Dec 11 15:25:46 crc kubenswrapper[4723]: I1211 15:25:46.014923 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 15:25:46 crc kubenswrapper[4723]: I1211 15:25:46.018031 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wk4hd" event={"ID":"935f64e7-557b-4244-b69e-b0943965db19","Type":"ContainerStarted","Data":"f02990612a9f6294981cd4e987e8709b954f06db1c714a9de89af8b7961d3755"} Dec 11 15:25:47 crc kubenswrapper[4723]: I1211 15:25:47.025007 4723 generic.go:334] "Generic (PLEG): container finished" podID="935f64e7-557b-4244-b69e-b0943965db19" containerID="f02990612a9f6294981cd4e987e8709b954f06db1c714a9de89af8b7961d3755" exitCode=0 Dec 11 15:25:47 crc kubenswrapper[4723]: I1211 15:25:47.025068 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wk4hd" event={"ID":"935f64e7-557b-4244-b69e-b0943965db19","Type":"ContainerDied","Data":"f02990612a9f6294981cd4e987e8709b954f06db1c714a9de89af8b7961d3755"} Dec 11 15:25:47 crc kubenswrapper[4723]: I1211 15:25:47.028835 4723 generic.go:334] "Generic (PLEG): container finished" podID="55e2838f-2f65-426a-aa56-0ec318cee927" containerID="a63899336bacf471dcecdbaf9ffe11b121c38de2be4479613ff652a56783c5f6" exitCode=0 Dec 11 15:25:47 crc kubenswrapper[4723]: I1211 15:25:47.028871 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dsn9s" event={"ID":"55e2838f-2f65-426a-aa56-0ec318cee927","Type":"ContainerDied","Data":"a63899336bacf471dcecdbaf9ffe11b121c38de2be4479613ff652a56783c5f6"} Dec 11 15:25:48 crc kubenswrapper[4723]: I1211 15:25:48.036520 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gb8rc" event={"ID":"28807a39-7a71-4a91-8e2d-586ce6a0c451","Type":"ContainerStarted","Data":"0aea2e10e376df14cddc82c8bfefa9b493419d897526733de06d17bab8a5ba93"} Dec 11 15:25:48 crc kubenswrapper[4723]: I1211 15:25:48.038166 4723 generic.go:334] "Generic (PLEG): container finished" podID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerID="762bb4eb752b647d2fdd19e68c1e2ab9a8a1fa51befde7c1f303fcd156b48bb7" exitCode=0 Dec 11 15:25:48 crc kubenswrapper[4723]: I1211 15:25:48.038240 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfg4x" event={"ID":"075c6779-3a8e-480d-9d98-27bb4728e95a","Type":"ContainerDied","Data":"762bb4eb752b647d2fdd19e68c1e2ab9a8a1fa51befde7c1f303fcd156b48bb7"} Dec 11 15:25:48 crc kubenswrapper[4723]: I1211 15:25:48.039939 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8xn7k" event={"ID":"52430e74-4768-47fa-966f-09160199d877","Type":"ContainerStarted","Data":"78257f73839bb8b43224a27f4df63830c419c2feeec09b8209962c78b3d850e6"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.046761 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtk8c" event={"ID":"255ec090-7fde-4cd0-b318-4689a8d9ea0b","Type":"ContainerStarted","Data":"b3f639d72cbd7620e2fe58b996492b7201be2f58f475e2fedb16d17b61864bbc"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.048698 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpzz" event={"ID":"164595ee-6652-4559-b8dd-7e040aa4602d","Type":"ContainerStarted","Data":"2ace641654a4040d3ef63eda4e0e0618e31f808e96bd54e2bd4068584179d47c"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.051671 4723 generic.go:334] "Generic (PLEG): container finished" 
podID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerID="0aea2e10e376df14cddc82c8bfefa9b493419d897526733de06d17bab8a5ba93" exitCode=0 Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.051727 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gb8rc" event={"ID":"28807a39-7a71-4a91-8e2d-586ce6a0c451","Type":"ContainerDied","Data":"0aea2e10e376df14cddc82c8bfefa9b493419d897526733de06d17bab8a5ba93"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.056416 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dsn9s" event={"ID":"55e2838f-2f65-426a-aa56-0ec318cee927","Type":"ContainerStarted","Data":"33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.063624 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfg4x" event={"ID":"075c6779-3a8e-480d-9d98-27bb4728e95a","Type":"ContainerStarted","Data":"9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.066470 4723 generic.go:334] "Generic (PLEG): container finished" podID="52430e74-4768-47fa-966f-09160199d877" containerID="78257f73839bb8b43224a27f4df63830c419c2feeec09b8209962c78b3d850e6" exitCode=0 Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.066519 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8xn7k" event={"ID":"52430e74-4768-47fa-966f-09160199d877","Type":"ContainerDied","Data":"78257f73839bb8b43224a27f4df63830c419c2feeec09b8209962c78b3d850e6"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.069789 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wk4hd" event={"ID":"935f64e7-557b-4244-b69e-b0943965db19","Type":"ContainerStarted","Data":"c12b6a7cbf639d5c261eaaae4736d49be91d67b01ee69fc9af6cb7f40841b4d3"} Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.136646 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nfg4x" podStartSLOduration=5.321545519 podStartE2EDuration="1m54.136630162s" podCreationTimestamp="2025-12-11 15:23:55 +0000 UTC" firstStartedPulling="2025-12-11 15:23:59.806045122 +0000 UTC m=+50.580278547" lastFinishedPulling="2025-12-11 15:25:48.621129745 +0000 UTC m=+159.395363190" observedRunningTime="2025-12-11 15:25:49.120645177 +0000 UTC m=+159.894878622" watchObservedRunningTime="2025-12-11 15:25:49.136630162 +0000 UTC m=+159.910863597" Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.162542 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wk4hd" podStartSLOduration=4.57057702 podStartE2EDuration="1m56.1625243s" podCreationTimestamp="2025-12-11 15:23:53 +0000 UTC" firstStartedPulling="2025-12-11 15:23:56.863641119 +0000 UTC m=+47.637874554" lastFinishedPulling="2025-12-11 15:25:48.455588399 +0000 UTC m=+159.229821834" observedRunningTime="2025-12-11 15:25:49.161776769 +0000 UTC m=+159.936010214" watchObservedRunningTime="2025-12-11 15:25:49.1625243 +0000 UTC m=+159.936757735" Dec 11 15:25:49 crc kubenswrapper[4723]: I1211 15:25:49.204157 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dsn9s" podStartSLOduration=6.507148557 podStartE2EDuration="1m56.204137585s" podCreationTimestamp="2025-12-11 
15:23:53 +0000 UTC" firstStartedPulling="2025-12-11 15:23:58.56939508 +0000 UTC m=+49.343628515" lastFinishedPulling="2025-12-11 15:25:48.266384108 +0000 UTC m=+159.040617543" observedRunningTime="2025-12-11 15:25:49.186318378 +0000 UTC m=+159.960551823" watchObservedRunningTime="2025-12-11 15:25:49.204137585 +0000 UTC m=+159.978371020" Dec 11 15:25:50 crc kubenswrapper[4723]: I1211 15:25:50.081781 4723 generic.go:334] "Generic (PLEG): container finished" podID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerID="b3f639d72cbd7620e2fe58b996492b7201be2f58f475e2fedb16d17b61864bbc" exitCode=0 Dec 11 15:25:50 crc kubenswrapper[4723]: I1211 15:25:50.081898 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtk8c" event={"ID":"255ec090-7fde-4cd0-b318-4689a8d9ea0b","Type":"ContainerDied","Data":"b3f639d72cbd7620e2fe58b996492b7201be2f58f475e2fedb16d17b61864bbc"} Dec 11 15:25:50 crc kubenswrapper[4723]: I1211 15:25:50.084022 4723 generic.go:334] "Generic (PLEG): container finished" podID="164595ee-6652-4559-b8dd-7e040aa4602d" containerID="2ace641654a4040d3ef63eda4e0e0618e31f808e96bd54e2bd4068584179d47c" exitCode=0 Dec 11 15:25:50 crc kubenswrapper[4723]: I1211 15:25:50.084519 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpzz" event={"ID":"164595ee-6652-4559-b8dd-7e040aa4602d","Type":"ContainerDied","Data":"2ace641654a4040d3ef63eda4e0e0618e31f808e96bd54e2bd4068584179d47c"} Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.092730 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpzz" event={"ID":"164595ee-6652-4559-b8dd-7e040aa4602d","Type":"ContainerStarted","Data":"6e745fcdd000061f4789b73f64536e6b57890dc1e471da0f6dd7658ef664f6c1"} Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.094941 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gb8rc" event={"ID":"28807a39-7a71-4a91-8e2d-586ce6a0c451","Type":"ContainerStarted","Data":"d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9"} Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.097318 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8xn7k" event={"ID":"52430e74-4768-47fa-966f-09160199d877","Type":"ContainerStarted","Data":"921102dd93e77e4004ea599ae48926db8e3e99aded68fcdb83f78fbefbeb3429"} Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.099639 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtk8c" event={"ID":"255ec090-7fde-4cd0-b318-4689a8d9ea0b","Type":"ContainerStarted","Data":"02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0"} Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.120777 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hqpzz" podStartSLOduration=6.046940652 podStartE2EDuration="1m58.12076122s" podCreationTimestamp="2025-12-11 15:23:53 +0000 UTC" firstStartedPulling="2025-12-11 15:23:58.658440053 +0000 UTC m=+49.432673488" lastFinishedPulling="2025-12-11 15:25:50.732260621 +0000 UTC m=+161.506494056" observedRunningTime="2025-12-11 15:25:51.1175971 +0000 UTC m=+161.891830535" watchObservedRunningTime="2025-12-11 15:25:51.12076122 +0000 UTC m=+161.894994655" Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.146349 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-gb8rc" podStartSLOduration=6.348084574 podStartE2EDuration="1m57.146325248s" podCreationTimestamp="2025-12-11 15:23:54 +0000 UTC" firstStartedPulling="2025-12-11 15:23:59.806374551 +0000 UTC m=+50.580607986" lastFinishedPulling="2025-12-11 15:25:50.604615215 +0000 UTC m=+161.378848660" observedRunningTime="2025-12-11 15:25:51.142654533 +0000 UTC m=+161.916887978" watchObservedRunningTime="2025-12-11 15:25:51.146325248 +0000 UTC m=+161.920558683" Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.185334 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8xn7k" podStartSLOduration=4.320845645 podStartE2EDuration="1m54.185315879s" podCreationTimestamp="2025-12-11 15:23:57 +0000 UTC" firstStartedPulling="2025-12-11 15:24:00.786344747 +0000 UTC m=+51.560578182" lastFinishedPulling="2025-12-11 15:25:50.650814981 +0000 UTC m=+161.425048416" observedRunningTime="2025-12-11 15:25:51.166165283 +0000 UTC m=+161.940398718" watchObservedRunningTime="2025-12-11 15:25:51.185315879 +0000 UTC m=+161.959549314" Dec 11 15:25:51 crc kubenswrapper[4723]: I1211 15:25:51.186128 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xtk8c" podStartSLOduration=4.347411905 podStartE2EDuration="1m54.186121092s" podCreationTimestamp="2025-12-11 15:23:57 +0000 UTC" firstStartedPulling="2025-12-11 15:24:00.997215678 +0000 UTC m=+51.771449113" lastFinishedPulling="2025-12-11 15:25:50.835924865 +0000 UTC m=+161.610158300" observedRunningTime="2025-12-11 15:25:51.183486717 +0000 UTC m=+161.957720152" watchObservedRunningTime="2025-12-11 15:25:51.186121092 +0000 UTC m=+161.960354527" Dec 11 15:25:54 crc kubenswrapper[4723]: I1211 15:25:54.129876 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:25:54 crc kubenswrapper[4723]: I1211 15:25:54.130235 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:25:54 crc kubenswrapper[4723]: I1211 15:25:54.159846 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:25:54 crc kubenswrapper[4723]: I1211 15:25:54.159923 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.005712 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.007155 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.164856 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.169305 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.326185 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.326255 4723 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.367912 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.708865 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.709195 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:25:55 crc kubenswrapper[4723]: I1211 15:25:55.749459 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:25:56 crc kubenswrapper[4723]: I1211 15:25:56.168162 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:25:56 crc kubenswrapper[4723]: I1211 15:25:56.172188 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:25:56 crc kubenswrapper[4723]: I1211 15:25:56.208195 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:25:56 crc kubenswrapper[4723]: I1211 15:25:56.208246 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:25:56 crc kubenswrapper[4723]: I1211 15:25:56.255724 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:25:56 crc kubenswrapper[4723]: I1211 15:25:56.989307 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gb8rc"] Dec 11 15:25:57 crc kubenswrapper[4723]: I1211 15:25:57.178710 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:25:57 crc kubenswrapper[4723]: I1211 15:25:57.929959 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:25:57 crc kubenswrapper[4723]: I1211 15:25:57.930333 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:25:57 crc kubenswrapper[4723]: I1211 15:25:57.967484 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:25:58 crc kubenswrapper[4723]: I1211 15:25:58.134867 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gb8rc" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="registry-server" containerID="cri-o://d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" gracePeriod=2 Dec 11 15:25:58 crc kubenswrapper[4723]: I1211 15:25:58.174180 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:25:58 crc kubenswrapper[4723]: I1211 15:25:58.260604 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:25:58 crc kubenswrapper[4723]: I1211 
15:25:58.260673 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:25:58 crc kubenswrapper[4723]: I1211 15:25:58.297788 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:25:58 crc kubenswrapper[4723]: I1211 15:25:58.779806 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dsn9s"] Dec 11 15:25:58 crc kubenswrapper[4723]: I1211 15:25:58.780199 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dsn9s" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="registry-server" containerID="cri-o://33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" gracePeriod=2 Dec 11 15:25:59 crc kubenswrapper[4723]: I1211 15:25:59.177537 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:25:59 crc kubenswrapper[4723]: I1211 15:25:59.380497 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nfg4x"] Dec 11 15:25:59 crc kubenswrapper[4723]: I1211 15:25:59.380747 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nfg4x" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="registry-server" containerID="cri-o://9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" gracePeriod=2 Dec 11 15:26:01 crc kubenswrapper[4723]: I1211 15:26:01.780737 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xtk8c"] Dec 11 15:26:01 crc kubenswrapper[4723]: I1211 15:26:01.781289 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xtk8c" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="registry-server" containerID="cri-o://02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" gracePeriod=2 Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 15:26:05.330608 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 15:26:05.331413 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 15:26:05.331819 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 
15:26:05.331944 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-dsn9s" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="registry-server" Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 15:26:05.709600 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 15:26:05.710132 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 15:26:05.710518 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:05 crc kubenswrapper[4723]: E1211 15:26:05.710696 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-gb8rc" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="registry-server" Dec 11 15:26:06 crc kubenswrapper[4723]: E1211 15:26:06.208730 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not found" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:06 crc kubenswrapper[4723]: E1211 15:26:06.209031 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not found" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:06 crc kubenswrapper[4723]: E1211 15:26:06.209352 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not found" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" 
cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:06 crc kubenswrapper[4723]: E1211 15:26:06.209463 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-nfg4x" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="registry-server" Dec 11 15:26:08 crc kubenswrapper[4723]: E1211 15:26:08.260783 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:08 crc kubenswrapper[4723]: E1211 15:26:08.261323 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:08 crc kubenswrapper[4723]: E1211 15:26:08.261739 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:08 crc kubenswrapper[4723]: E1211 15:26:08.261772 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-xtk8c" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="registry-server" Dec 11 15:26:13 crc kubenswrapper[4723]: I1211 15:26:13.745819 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:26:13 crc kubenswrapper[4723]: I1211 15:26:13.746303 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.018225 4723 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.018465 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="782a1e65-eeb8-4f28-8177-990863f34535" containerName="pruner" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.018479 4723 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="782a1e65-eeb8-4f28-8177-990863f34535" containerName="pruner" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.018489 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.018496 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.018621 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="782a1e65-eeb8-4f28-8177-990863f34535" containerName="pruner" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.018644 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9cb7bb8-2613-4876-aa27-39d58db1aae1" containerName="kube-multus-additional-cni-plugins" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.019329 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.019404 4723 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.019636 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865" gracePeriod=15 Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.019772 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://dabae3f47de894040d6fce7d39cafde130405d552e782c954b6bce9d85a30737" gracePeriod=15 Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.019813 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98" gracePeriod=15 Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.019846 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c" gracePeriod=15 Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.019877 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93" gracePeriod=15 Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.020954 4723 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.021234 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021250 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.021260 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021268 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.021285 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021296 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.021305 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021312 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.021329 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021336 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.021349 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021357 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021475 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021496 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021506 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021516 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021525 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021535 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.021659 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.021670 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.048033 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fr92q"] Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.053368 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.066477 4723 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161259 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161301 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161332 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161352 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161410 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161475 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161506 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.161563 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262625 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262683 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262721 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262751 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262770 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262773 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262824 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262789 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262852 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262872 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262886 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262898 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262920 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262921 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.262939 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.263013 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 
15:26:15.326812 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.327780 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.328278 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.328326 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-dsn9s" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="registry-server" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.329507 4723 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events/certified-operators-dsn9s.188032ab5260933f\": dial tcp 38.102.83.151:6443: connect: connection refused" event="&Event{ObjectMeta:{certified-operators-dsn9s.188032ab5260933f openshift-marketplace 29256 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:certified-operators-dsn9s,UID:55e2838f-2f65-426a-aa56-0ec318cee927,APIVersion:v1,ResourceVersion:27688,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 15:26:05 +0000 UTC,LastTimestamp:2025-12-11 15:26:15.328380256 +0000 UTC m=+186.102613701,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 15:26:15 crc kubenswrapper[4723]: I1211 15:26:15.350122 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.709470 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.709775 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.710173 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:15 crc kubenswrapper[4723]: E1211 15:26:15.710214 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-gb8rc" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="registry-server" Dec 11 15:26:16 crc kubenswrapper[4723]: E1211 15:26:16.208971 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not found" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:16 crc kubenswrapper[4723]: E1211 15:26:16.210119 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not found" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:16 crc kubenswrapper[4723]: E1211 15:26:16.210472 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not found" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:16 crc kubenswrapper[4723]: E1211 15:26:16.210547 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7 is running failed: container process not 
found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-nfg4x" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="registry-server" Dec 11 15:26:17 crc kubenswrapper[4723]: E1211 15:26:17.376743 4723 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865.scope\": RecentStats: unable to find data in memory cache]" Dec 11 15:26:18 crc kubenswrapper[4723]: E1211 15:26:18.260631 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:18 crc kubenswrapper[4723]: E1211 15:26:18.261768 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:18 crc kubenswrapper[4723]: E1211 15:26:18.262207 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:26:18 crc kubenswrapper[4723]: E1211 15:26:18.262359 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-xtk8c" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="registry-server" Dec 11 15:26:18 crc kubenswrapper[4723]: I1211 15:26:18.349593 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gb8rc_28807a39-7a71-4a91-8e2d-586ce6a0c451/registry-server/0.log" Dec 11 15:26:18 crc kubenswrapper[4723]: I1211 15:26:18.352193 4723 generic.go:334] "Generic (PLEG): container finished" podID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" exitCode=-1 Dec 11 15:26:18 crc kubenswrapper[4723]: I1211 15:26:18.352353 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gb8rc" event={"ID":"28807a39-7a71-4a91-8e2d-586ce6a0c451","Type":"ContainerDied","Data":"d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9"} Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.042517 4723 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.043132 4723 
controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.043441 4723 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.043779 4723 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.044096 4723 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.044138 4723 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.044520 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="200ms" Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.246396 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="400ms" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.358626 4723 generic.go:334] "Generic (PLEG): container finished" podID="aaa5fc4f-966f-4bfa-9801-8de33c422283" containerID="aa1d478832be5d27c2a1f2e967ae45a56b4a78ecb21e100e950e4174e2d84f26" exitCode=0 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.358695 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"aaa5fc4f-966f-4bfa-9801-8de33c422283","Type":"ContainerDied","Data":"aa1d478832be5d27c2a1f2e967ae45a56b4a78ecb21e100e950e4174e2d84f26"} Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.359408 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.359821 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.360775 4723 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-xtk8c_255ec090-7fde-4cd0-b318-4689a8d9ea0b/registry-server/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.361377 4723 generic.go:334] "Generic (PLEG): container finished" podID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" exitCode=137 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.361427 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtk8c" event={"ID":"255ec090-7fde-4cd0-b318-4689a8d9ea0b","Type":"ContainerDied","Data":"02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0"} Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.362641 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dsn9s_55e2838f-2f65-426a-aa56-0ec318cee927/registry-server/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.363134 4723 generic.go:334] "Generic (PLEG): container finished" podID="55e2838f-2f65-426a-aa56-0ec318cee927" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" exitCode=137 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.363181 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dsn9s" event={"ID":"55e2838f-2f65-426a-aa56-0ec318cee927","Type":"ContainerDied","Data":"33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df"} Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.364884 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.366003 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.367468 4723 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dabae3f47de894040d6fce7d39cafde130405d552e782c954b6bce9d85a30737" exitCode=0 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.367487 4723 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98" exitCode=0 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.367494 4723 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c" exitCode=0 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.367501 4723 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93" exitCode=2 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.367552 4723 scope.go:117] "RemoveContainer" containerID="2e1752507aa362773afa6adac7b08547c6bb8d50076459a049a12248f3d319a5" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.369581 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"742e22afe89b192b743c3dd6048235104c0ca2aaa4db9cfbf69b07f54f01ca64"} Dec 11 15:26:19 crc kubenswrapper[4723]: 
I1211 15:26:19.369615 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"d65964dca049da3d28f350f1dd4024240406ec3b02f44022e55c11365afde17e"} Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.371517 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nfg4x_075c6779-3a8e-480d-9d98-27bb4728e95a/registry-server/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.372272 4723 generic.go:334] "Generic (PLEG): container finished" podID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" exitCode=137 Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.372307 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfg4x" event={"ID":"075c6779-3a8e-480d-9d98-27bb4728e95a","Type":"ContainerDied","Data":"9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7"} Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.551379 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.552369 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.579402 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xtk8c_255ec090-7fde-4cd0-b318-4689a8d9ea0b/registry-server/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.580111 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.580618 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.580952 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.581388 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.585145 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.585538 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.585824 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.586180 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.586447 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.590161 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dsn9s_55e2838f-2f65-426a-aa56-0ec318cee927/registry-server/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.591028 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.591385 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.591663 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.591964 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.592216 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.592473 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.594931 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nfg4x_075c6779-3a8e-480d-9d98-27bb4728e95a/registry-server/0.log" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.595619 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.596079 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.596346 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.596600 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.596845 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.597105 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.597344 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:19 crc kubenswrapper[4723]: E1211 15:26:19.647380 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="800ms" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.727690 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-utilities\") pod \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.727747 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-utilities\") pod \"28807a39-7a71-4a91-8e2d-586ce6a0c451\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.727771 4723 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-utilities\") pod \"075c6779-3a8e-480d-9d98-27bb4728e95a\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.727796 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5rvb\" (UniqueName: \"kubernetes.io/projected/55e2838f-2f65-426a-aa56-0ec318cee927-kube-api-access-z5rvb\") pod \"55e2838f-2f65-426a-aa56-0ec318cee927\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.727854 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-catalog-content\") pod \"55e2838f-2f65-426a-aa56-0ec318cee927\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.727874 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-utilities\") pod \"55e2838f-2f65-426a-aa56-0ec318cee927\" (UID: \"55e2838f-2f65-426a-aa56-0ec318cee927\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.727912 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdjq4\" (UniqueName: \"kubernetes.io/projected/28807a39-7a71-4a91-8e2d-586ce6a0c451-kube-api-access-mdjq4\") pod \"28807a39-7a71-4a91-8e2d-586ce6a0c451\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.728028 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-catalog-content\") pod \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.728072 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-catalog-content\") pod \"075c6779-3a8e-480d-9d98-27bb4728e95a\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.728095 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-catalog-content\") pod \"28807a39-7a71-4a91-8e2d-586ce6a0c451\" (UID: \"28807a39-7a71-4a91-8e2d-586ce6a0c451\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.728120 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxmp7\" (UniqueName: \"kubernetes.io/projected/075c6779-3a8e-480d-9d98-27bb4728e95a-kube-api-access-gxmp7\") pod \"075c6779-3a8e-480d-9d98-27bb4728e95a\" (UID: \"075c6779-3a8e-480d-9d98-27bb4728e95a\") " Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.728139 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m48kj\" (UniqueName: \"kubernetes.io/projected/255ec090-7fde-4cd0-b318-4689a8d9ea0b-kube-api-access-m48kj\") pod \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\" (UID: \"255ec090-7fde-4cd0-b318-4689a8d9ea0b\") " Dec 11 15:26:19 crc 
kubenswrapper[4723]: I1211 15:26:19.728842 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-utilities" (OuterVolumeSpecName: "utilities") pod "255ec090-7fde-4cd0-b318-4689a8d9ea0b" (UID: "255ec090-7fde-4cd0-b318-4689a8d9ea0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.729417 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-utilities" (OuterVolumeSpecName: "utilities") pod "55e2838f-2f65-426a-aa56-0ec318cee927" (UID: "55e2838f-2f65-426a-aa56-0ec318cee927"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.729810 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-utilities" (OuterVolumeSpecName: "utilities") pod "075c6779-3a8e-480d-9d98-27bb4728e95a" (UID: "075c6779-3a8e-480d-9d98-27bb4728e95a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.730551 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-utilities" (OuterVolumeSpecName: "utilities") pod "28807a39-7a71-4a91-8e2d-586ce6a0c451" (UID: "28807a39-7a71-4a91-8e2d-586ce6a0c451"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.734396 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28807a39-7a71-4a91-8e2d-586ce6a0c451-kube-api-access-mdjq4" (OuterVolumeSpecName: "kube-api-access-mdjq4") pod "28807a39-7a71-4a91-8e2d-586ce6a0c451" (UID: "28807a39-7a71-4a91-8e2d-586ce6a0c451"). InnerVolumeSpecName "kube-api-access-mdjq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.734466 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/075c6779-3a8e-480d-9d98-27bb4728e95a-kube-api-access-gxmp7" (OuterVolumeSpecName: "kube-api-access-gxmp7") pod "075c6779-3a8e-480d-9d98-27bb4728e95a" (UID: "075c6779-3a8e-480d-9d98-27bb4728e95a"). InnerVolumeSpecName "kube-api-access-gxmp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.734591 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/255ec090-7fde-4cd0-b318-4689a8d9ea0b-kube-api-access-m48kj" (OuterVolumeSpecName: "kube-api-access-m48kj") pod "255ec090-7fde-4cd0-b318-4689a8d9ea0b" (UID: "255ec090-7fde-4cd0-b318-4689a8d9ea0b"). InnerVolumeSpecName "kube-api-access-m48kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.735726 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e2838f-2f65-426a-aa56-0ec318cee927-kube-api-access-z5rvb" (OuterVolumeSpecName: "kube-api-access-z5rvb") pod "55e2838f-2f65-426a-aa56-0ec318cee927" (UID: "55e2838f-2f65-426a-aa56-0ec318cee927"). InnerVolumeSpecName "kube-api-access-z5rvb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.753842 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "075c6779-3a8e-480d-9d98-27bb4728e95a" (UID: "075c6779-3a8e-480d-9d98-27bb4728e95a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831149 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdjq4\" (UniqueName: \"kubernetes.io/projected/28807a39-7a71-4a91-8e2d-586ce6a0c451-kube-api-access-mdjq4\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831208 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831229 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxmp7\" (UniqueName: \"kubernetes.io/projected/075c6779-3a8e-480d-9d98-27bb4728e95a-kube-api-access-gxmp7\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831247 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m48kj\" (UniqueName: \"kubernetes.io/projected/255ec090-7fde-4cd0-b318-4689a8d9ea0b-kube-api-access-m48kj\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831261 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831274 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831289 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/075c6779-3a8e-480d-9d98-27bb4728e95a-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831302 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5rvb\" (UniqueName: \"kubernetes.io/projected/55e2838f-2f65-426a-aa56-0ec318cee927-kube-api-access-z5rvb\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:19 crc kubenswrapper[4723]: I1211 15:26:19.831315 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.060643 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55e2838f-2f65-426a-aa56-0ec318cee927" (UID: "55e2838f-2f65-426a-aa56-0ec318cee927"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.091149 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "255ec090-7fde-4cd0-b318-4689a8d9ea0b" (UID: "255ec090-7fde-4cd0-b318-4689a8d9ea0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.137748 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55e2838f-2f65-426a-aa56-0ec318cee927-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.137779 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/255ec090-7fde-4cd0-b318-4689a8d9ea0b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.380015 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dsn9s_55e2838f-2f65-426a-aa56-0ec318cee927/registry-server/0.log" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.380896 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dsn9s" event={"ID":"55e2838f-2f65-426a-aa56-0ec318cee927","Type":"ContainerDied","Data":"8696682b5218f8db4a76019f7dbbd3b060429bce2801c7ddfdb716aca5e362f2"} Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.381001 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dsn9s" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.386292 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.387012 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.387301 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.388005 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.388335 4723 status_manager.go:851] "Failed to get status for pod" 
podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.388562 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.390301 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.391776 4723 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865" exitCode=0 Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.395729 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nfg4x_075c6779-3a8e-480d-9d98-27bb4728e95a/registry-server/0.log" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.396810 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.396934 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nfg4x" event={"ID":"075c6779-3a8e-480d-9d98-27bb4728e95a","Type":"ContainerDied","Data":"19e7a0871ed5a93aaa530d03479cd69f1783ef3946d3c4ddda80e3483a69ed5c"} Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.397029 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nfg4x" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.397261 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.397590 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.397851 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.398392 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.398724 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.399191 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-xtk8c_255ec090-7fde-4cd0-b318-4689a8d9ea0b/registry-server/0.log" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.399191 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.399464 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.400092 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtk8c" event={"ID":"255ec090-7fde-4cd0-b318-4689a8d9ea0b","Type":"ContainerDied","Data":"0e9d8ce295e080af93acbfeff5dbbe6c7ca56bf04e2b96160da9c73a09192912"} Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.400174 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xtk8c" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.399770 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.400927 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.401155 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.401360 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.401711 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.402044 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.402564 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.402808 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.403450 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.403805 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.404891 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gb8rc" event={"ID":"28807a39-7a71-4a91-8e2d-586ce6a0c451","Type":"ContainerDied","Data":"0e9d67e355f126fbd7e3d80ecc1b178f6a90402465a5fed1f0eb0896506c9475"} Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.404997 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gb8rc" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.405762 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.407839 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.408166 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.408453 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.408704 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.408920 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.415691 4723 status_manager.go:851] 
"Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.416815 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.418178 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.418420 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.418659 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.418900 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.426493 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.426798 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.427174 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.427685 4723 
status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.428226 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.430085 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: E1211 15:26:20.447837 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="1.6s" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.637921 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.638479 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.638875 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.639144 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.639379 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.639626 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.639867 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.746110 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-kubelet-dir\") pod \"aaa5fc4f-966f-4bfa-9801-8de33c422283\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.746166 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aaa5fc4f-966f-4bfa-9801-8de33c422283-kube-api-access\") pod \"aaa5fc4f-966f-4bfa-9801-8de33c422283\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.746330 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-var-lock\") pod \"aaa5fc4f-966f-4bfa-9801-8de33c422283\" (UID: \"aaa5fc4f-966f-4bfa-9801-8de33c422283\") " Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.746325 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "aaa5fc4f-966f-4bfa-9801-8de33c422283" (UID: "aaa5fc4f-966f-4bfa-9801-8de33c422283"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.746666 4723 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.746567 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-var-lock" (OuterVolumeSpecName: "var-lock") pod "aaa5fc4f-966f-4bfa-9801-8de33c422283" (UID: "aaa5fc4f-966f-4bfa-9801-8de33c422283"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.749810 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaa5fc4f-966f-4bfa-9801-8de33c422283-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "aaa5fc4f-966f-4bfa-9801-8de33c422283" (UID: "aaa5fc4f-966f-4bfa-9801-8de33c422283"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.848206 4723 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/aaa5fc4f-966f-4bfa-9801-8de33c422283-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:20 crc kubenswrapper[4723]: I1211 15:26:20.848520 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aaa5fc4f-966f-4bfa-9801-8de33c422283-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.413487 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"aaa5fc4f-966f-4bfa-9801-8de33c422283","Type":"ContainerDied","Data":"e77e37277d035972ff31e8c6b454d179a462e27adae93129dc55c7936860c29c"} Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.413546 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e77e37277d035972ff31e8c6b454d179a462e27adae93129dc55c7936860c29c" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.413555 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.431097 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.431823 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.432442 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.432740 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.433093 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:21 crc kubenswrapper[4723]: I1211 15:26:21.433442 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:22 crc kubenswrapper[4723]: E1211 15:26:22.048731 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="3.2s" Dec 11 15:26:22 crc kubenswrapper[4723]: E1211 15:26:22.196163 4723 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events/certified-operators-dsn9s.188032ab5260933f\": dial tcp 38.102.83.151:6443: connect: connection refused" event="&Event{ObjectMeta:{certified-operators-dsn9s.188032ab5260933f openshift-marketplace 29256 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:certified-operators-dsn9s,UID:55e2838f-2f65-426a-aa56-0ec318cee927,APIVersion:v1,ResourceVersion:27688,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of 33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 15:26:05 +0000 UTC,LastTimestamp:2025-12-11 15:26:15.328380256 +0000 UTC m=+186.102613701,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 15:26:24 crc kubenswrapper[4723]: I1211 15:26:24.767668 4723 scope.go:117] "RemoveContainer" containerID="33ae36e058d8344aea2267db297701f74c06f18fcec251004637b1e42214c5df" Dec 11 15:26:24 crc kubenswrapper[4723]: I1211 15:26:24.785087 4723 scope.go:117] "RemoveContainer" containerID="a63899336bacf471dcecdbaf9ffe11b121c38de2be4479613ff652a56783c5f6" Dec 11 15:26:24 crc kubenswrapper[4723]: I1211 15:26:24.800805 4723 scope.go:117] "RemoveContainer" containerID="6b44e7284c0b4fda649ec69dd8700a1dfbbf3303438e9aa581e40bfae95dd600" Dec 11 15:26:24 crc kubenswrapper[4723]: I1211 15:26:24.814581 4723 scope.go:117] "RemoveContainer" containerID="9b4edbad0e2683ed7f841e0465f9ba5d0216971464449bcb5d7a72a9af6d47a7" Dec 11 15:26:24 crc kubenswrapper[4723]: I1211 15:26:24.831569 4723 scope.go:117] "RemoveContainer" containerID="762bb4eb752b647d2fdd19e68c1e2ab9a8a1fa51befde7c1f303fcd156b48bb7" Dec 11 15:26:24 crc kubenswrapper[4723]: I1211 15:26:24.847030 4723 scope.go:117] "RemoveContainer" containerID="61d52ba4e73d154b781c84ccb608c395538bb9cff24fe28f116dcc552d451535" Dec 11 15:26:24 crc kubenswrapper[4723]: I1211 15:26:24.868080 4723 scope.go:117] "RemoveContainer" containerID="02b0a2ef20a320173a1117909c10426a33d3136cd2f0177e843641b49959f4d0" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.132442 4723 scope.go:117] "RemoveContainer" containerID="b3f639d72cbd7620e2fe58b996492b7201be2f58f475e2fedb16d17b61864bbc" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.157339 4723 scope.go:117] "RemoveContainer" containerID="cac593226510a5051b74c72d164a9ce8a03cae227303c4f252d13d7db39dd4ea" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.166799 4723 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.167634 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.168440 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.168754 4723 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.169041 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.169342 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.169657 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.169960 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.170280 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.173529 4723 scope.go:117] "RemoveContainer" containerID="d4e20bb16a3738550395519d15e9da4c38c5fcf6ae43db3684422af753ed5aa9" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.189107 4723 scope.go:117] "RemoveContainer" containerID="0aea2e10e376df14cddc82c8bfefa9b493419d897526733de06d17bab8a5ba93" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.210989 4723 scope.go:117] 
"RemoveContainer" containerID="b40ede035ac2db2bf1506aa3505b5e63cf2750140418fe2becf158e86f0afe37" Dec 11 15:26:25 crc kubenswrapper[4723]: E1211 15:26:25.250496 4723 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.151:6443: connect: connection refused" interval="6.4s" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.314533 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.314660 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.314684 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.314713 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.314780 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.314865 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.314987 4723 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.315000 4723 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.315008 4723 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.447664 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.448805 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.457869 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.458585 4723 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.459140 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.459452 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.459789 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.460189 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.460711 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.463216 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.463680 4723 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.464204 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.464579 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.464951 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.465442 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.465885 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:25 crc kubenswrapper[4723]: I1211 15:26:25.562508 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" 
path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.390520 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28807a39-7a71-4a91-8e2d-586ce6a0c451" (UID: "28807a39-7a71-4a91-8e2d-586ce6a0c451"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.395524 4723 scope.go:117] "RemoveContainer" containerID="dabae3f47de894040d6fce7d39cafde130405d552e782c954b6bce9d85a30737" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.411113 4723 scope.go:117] "RemoveContainer" containerID="f7162a2fc8f857baca9637968e7b99dc44794f0dcdb4239cccbe3fa97dc3cd98" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.418470 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.418680 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.418906 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.419163 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.419351 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.419558 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.423742 4723 scope.go:117] "RemoveContainer" containerID="3ff70f013958429c67d484e5001b13972069bc741315038e265ef2ec798db47c" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.426899 4723 reconciler_common.go:293] "Volume detached for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28807a39-7a71-4a91-8e2d-586ce6a0c451-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.437431 4723 scope.go:117] "RemoveContainer" containerID="a4946fc080106c3b43251e408724a4140320be4db41eec99317c61aaa6c77d93" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.449355 4723 scope.go:117] "RemoveContainer" containerID="d76f17d56831e0550a89b2833ddc56116fe8beb1ab59054febe6b053b0a5c865" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.468128 4723 scope.go:117] "RemoveContainer" containerID="275e79925cb499480f2d63d6bee7b1832166d353ea79b352693a6ea8e43f91b2" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.548038 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.548981 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.550040 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.550331 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.550516 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.550694 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.550891 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.562714 4723 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.562746 4723 
mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:26 crc kubenswrapper[4723]: E1211 15:26:26.563141 4723 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:26 crc kubenswrapper[4723]: I1211 15:26:26.563747 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:27 crc kubenswrapper[4723]: I1211 15:26:27.473843 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"27e63b4f52b01cdd8f988be1beea925d87bf1fbb15aad96883a087eb233b3606"} Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.480795 4723 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="5c5a023f7cc696941ec3bfa4ecb6db28f63b6b136c63feeac87cbec64522cdb1" exitCode=0 Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.480901 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"5c5a023f7cc696941ec3bfa4ecb6db28f63b6b136c63feeac87cbec64522cdb1"} Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.481321 4723 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.481340 4723 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:28 crc kubenswrapper[4723]: E1211 15:26:28.482204 4723 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.483503 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.483799 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.484014 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 
15:26:28.484250 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.484460 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.484807 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.485168 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.485237 4723 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="90f4d5e38cec9da43bfd952a821acb47cf0134d9f5d47656bed0c5159876e42b" exitCode=1 Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.485272 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"90f4d5e38cec9da43bfd952a821acb47cf0134d9f5d47656bed0c5159876e42b"} Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.485853 4723 scope.go:117] "RemoveContainer" containerID="90f4d5e38cec9da43bfd952a821acb47cf0134d9f5d47656bed0c5159876e42b" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.486231 4723 status_manager.go:851] "Failed to get status for pod" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" pod="openshift-marketplace/redhat-marketplace-nfg4x" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-nfg4x\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.486738 4723 status_manager.go:851] "Failed to get status for pod" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" pod="openshift-marketplace/redhat-operators-xtk8c" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xtk8c\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.487384 4723 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.488076 4723 status_manager.go:851] "Failed to get status for pod" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" 
pod="openshift-marketplace/community-operators-gb8rc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-gb8rc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.488430 4723 status_manager.go:851] "Failed to get status for pod" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" pod="openshift-marketplace/certified-operators-dsn9s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-dsn9s\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.488790 4723 status_manager.go:851] "Failed to get status for pod" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:28 crc kubenswrapper[4723]: I1211 15:26:28.490871 4723 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.151:6443: connect: connection refused" Dec 11 15:26:29 crc kubenswrapper[4723]: I1211 15:26:29.495807 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 15:26:29 crc kubenswrapper[4723]: I1211 15:26:29.496058 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0563ecfe023ceccc3a4858f7618e7059d4cabd5a230e59c9b7478eff065ad427"} Dec 11 15:26:29 crc kubenswrapper[4723]: I1211 15:26:29.501409 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b19dace0af0cb8e1405ca497e561194418604b6bd090fbfcd11a0ace242dfc1f"} Dec 11 15:26:29 crc kubenswrapper[4723]: I1211 15:26:29.503998 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px8w4" event={"ID":"812543bf-43e0-49c1-8348-890db4be7090","Type":"ContainerStarted","Data":"4cc0e579e4ffccf56f376d1a0a58377bc37aa6202056f499de3c4c6186b9d623"} Dec 11 15:26:30 crc kubenswrapper[4723]: I1211 15:26:30.512705 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b11843bb5b8453f5cdcc357062de56b09e20ff63a5f63c56b94246b098b4e5a5"} Dec 11 15:26:30 crc kubenswrapper[4723]: I1211 15:26:30.514499 4723 generic.go:334] "Generic (PLEG): container finished" podID="812543bf-43e0-49c1-8348-890db4be7090" containerID="4cc0e579e4ffccf56f376d1a0a58377bc37aa6202056f499de3c4c6186b9d623" exitCode=0 Dec 11 15:26:30 crc kubenswrapper[4723]: I1211 15:26:30.514540 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px8w4" event={"ID":"812543bf-43e0-49c1-8348-890db4be7090","Type":"ContainerDied","Data":"4cc0e579e4ffccf56f376d1a0a58377bc37aa6202056f499de3c4c6186b9d623"} Dec 11 
15:26:31 crc kubenswrapper[4723]: I1211 15:26:31.526900 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"37e398e460a4e95c46c6bdbee104a11911a248fe60f01dfa4f6bfe6b7064e95d"} Dec 11 15:26:31 crc kubenswrapper[4723]: I1211 15:26:31.527311 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"709b5ff1333032daf0ed86c588d9101b634b966575b97657063d5a9428e6af80"} Dec 11 15:26:32 crc kubenswrapper[4723]: I1211 15:26:32.536838 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5f99e0c74c6b68ecbc9b7bce27ef6c4f672fcb426d1183cbf0aedb923da5b27f"} Dec 11 15:26:32 crc kubenswrapper[4723]: I1211 15:26:32.537119 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:32 crc kubenswrapper[4723]: I1211 15:26:32.537240 4723 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:32 crc kubenswrapper[4723]: I1211 15:26:32.537276 4723 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:32 crc kubenswrapper[4723]: I1211 15:26:32.539142 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px8w4" event={"ID":"812543bf-43e0-49c1-8348-890db4be7090","Type":"ContainerStarted","Data":"c0b862e3701beca77ff1258a7ba9788f7c471ef9ce2b2eb40f263c889268e28c"} Dec 11 15:26:32 crc kubenswrapper[4723]: I1211 15:26:32.766448 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:26:35 crc kubenswrapper[4723]: I1211 15:26:35.912887 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:26:35 crc kubenswrapper[4723]: I1211 15:26:35.913228 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:26:35 crc kubenswrapper[4723]: I1211 15:26:35.974728 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:26:36 crc kubenswrapper[4723]: I1211 15:26:36.410424 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:26:36 crc kubenswrapper[4723]: I1211 15:26:36.414546 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:26:36 crc kubenswrapper[4723]: I1211 15:26:36.563927 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:36 crc kubenswrapper[4723]: I1211 15:26:36.563994 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:36 crc kubenswrapper[4723]: I1211 15:26:36.571545 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:37 crc kubenswrapper[4723]: I1211 15:26:37.546961 4723 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:37 crc kubenswrapper[4723]: I1211 15:26:37.567550 4723 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:37 crc kubenswrapper[4723]: I1211 15:26:37.567615 4723 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:37 crc kubenswrapper[4723]: I1211 15:26:37.571736 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:37 crc kubenswrapper[4723]: I1211 15:26:37.574621 4723 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="6c92a25c-9a95-4d0b-8ff9-d66b9c259218" Dec 11 15:26:38 crc kubenswrapper[4723]: I1211 15:26:38.572132 4723 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:38 crc kubenswrapper[4723]: I1211 15:26:38.572166 4723 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:39 crc kubenswrapper[4723]: I1211 15:26:39.562933 4723 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="6c92a25c-9a95-4d0b-8ff9-d66b9c259218" Dec 11 15:26:40 crc kubenswrapper[4723]: I1211 15:26:40.073906 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" podUID="0008032b-faa4-4c7a-87ea-5ede94bc0229" containerName="oauth-openshift" containerID="cri-o://af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802" gracePeriod=15 Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.539036 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.589723 4723 generic.go:334] "Generic (PLEG): container finished" podID="0008032b-faa4-4c7a-87ea-5ede94bc0229" containerID="af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802" exitCode=0 Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.589774 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" event={"ID":"0008032b-faa4-4c7a-87ea-5ede94bc0229","Type":"ContainerDied","Data":"af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802"} Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.589817 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" event={"ID":"0008032b-faa4-4c7a-87ea-5ede94bc0229","Type":"ContainerDied","Data":"d80b35ae5c2252f9a7a855dce2875aa5c4e0d9307ec1355edbab42fb2ea6ede6"} Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.589838 4723 scope.go:117] "RemoveContainer" containerID="af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.589840 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fr92q" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.609220 4723 scope.go:117] "RemoveContainer" containerID="af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802" Dec 11 15:26:41 crc kubenswrapper[4723]: E1211 15:26:41.609678 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802\": container with ID starting with af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802 not found: ID does not exist" containerID="af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.609727 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802"} err="failed to get container status \"af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802\": rpc error: code = NotFound desc = could not find container \"af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802\": container with ID starting with af8535242b266b508aa72871f9cdf1d032a7d8d813e12c6c7f8fb8e6b523a802 not found: ID does not exist" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.627830 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-serving-cert\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.627909 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-login\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.627985 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-ocp-branding-template\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628051 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-trusted-ca-bundle\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628083 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-provider-selection\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628135 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-policies\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628170 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-error\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628200 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-idp-0-file-data\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628234 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-session\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628261 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-router-certs\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628285 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-dir\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628325 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7kxk\" (UniqueName: 
\"kubernetes.io/projected/0008032b-faa4-4c7a-87ea-5ede94bc0229-kube-api-access-h7kxk\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628376 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-cliconfig\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628435 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-service-ca\") pod \"0008032b-faa4-4c7a-87ea-5ede94bc0229\" (UID: \"0008032b-faa4-4c7a-87ea-5ede94bc0229\") " Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.628875 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.630427 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.630469 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.630487 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.630559 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.635194 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.635803 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0008032b-faa4-4c7a-87ea-5ede94bc0229-kube-api-access-h7kxk" (OuterVolumeSpecName: "kube-api-access-h7kxk") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "kube-api-access-h7kxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.636042 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.636302 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.636570 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.636759 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.636891 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.637177 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.637786 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "0008032b-faa4-4c7a-87ea-5ede94bc0229" (UID: "0008032b-faa4-4c7a-87ea-5ede94bc0229"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.729835 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7kxk\" (UniqueName: \"kubernetes.io/projected/0008032b-faa4-4c7a-87ea-5ede94bc0229-kube-api-access-h7kxk\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.729901 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.729913 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730012 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730049 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730061 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730075 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730087 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730098 4723 
reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730113 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730124 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730134 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730144 4723 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0008032b-faa4-4c7a-87ea-5ede94bc0229-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:41 crc kubenswrapper[4723]: I1211 15:26:41.730155 4723 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0008032b-faa4-4c7a-87ea-5ede94bc0229-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:26:42 crc kubenswrapper[4723]: I1211 15:26:42.771930 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 15:26:43 crc kubenswrapper[4723]: I1211 15:26:43.745100 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:26:43 crc kubenswrapper[4723]: I1211 15:26:43.745376 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:26:43 crc kubenswrapper[4723]: I1211 15:26:43.745477 4723 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:26:43 crc kubenswrapper[4723]: I1211 15:26:43.746112 4723 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eea59cca828e649f2ade7213257a4996233bb2461e56f1be372f343c931be07b"} pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 15:26:43 crc kubenswrapper[4723]: I1211 15:26:43.746228 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" 
containerID="cri-o://eea59cca828e649f2ade7213257a4996233bb2461e56f1be372f343c931be07b" gracePeriod=600 Dec 11 15:26:45 crc kubenswrapper[4723]: I1211 15:26:45.614469 4723 generic.go:334] "Generic (PLEG): container finished" podID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerID="eea59cca828e649f2ade7213257a4996233bb2461e56f1be372f343c931be07b" exitCode=0 Dec 11 15:26:45 crc kubenswrapper[4723]: I1211 15:26:45.614566 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerDied","Data":"eea59cca828e649f2ade7213257a4996233bb2461e56f1be372f343c931be07b"} Dec 11 15:26:45 crc kubenswrapper[4723]: I1211 15:26:45.614899 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"13f30ec6b4ba49b633e30eb57b2722a37e416c3766af4898268c21192f72194a"} Dec 11 15:26:45 crc kubenswrapper[4723]: I1211 15:26:45.636727 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 15:26:45 crc kubenswrapper[4723]: I1211 15:26:45.954165 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:26:46 crc kubenswrapper[4723]: I1211 15:26:46.724103 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 15:26:46 crc kubenswrapper[4723]: I1211 15:26:46.765254 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 15:26:46 crc kubenswrapper[4723]: I1211 15:26:46.967811 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 15:26:47 crc kubenswrapper[4723]: I1211 15:26:47.242360 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 11 15:26:47 crc kubenswrapper[4723]: I1211 15:26:47.292719 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 15:26:47 crc kubenswrapper[4723]: I1211 15:26:47.336190 4723 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 15:26:47 crc kubenswrapper[4723]: I1211 15:26:47.613818 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 15:26:48 crc kubenswrapper[4723]: I1211 15:26:48.009922 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 15:26:48 crc kubenswrapper[4723]: I1211 15:26:48.079388 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 15:26:48 crc kubenswrapper[4723]: I1211 15:26:48.371923 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 15:26:48 crc kubenswrapper[4723]: I1211 15:26:48.420173 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.043041 4723 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.086772 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.197682 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.342044 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.351420 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.425942 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.463402 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.717617 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.878211 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.893233 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.959555 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 11 15:26:49 crc kubenswrapper[4723]: I1211 15:26:49.966800 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.007434 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.007695 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.013567 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.035419 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.128857 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.192842 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.200307 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.227682 4723 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.282519 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.311217 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.399542 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.452539 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.529494 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.567553 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.572367 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.664029 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.756014 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.830713 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 15:26:50 crc kubenswrapper[4723]: I1211 15:26:50.994345 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.033950 4723 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.085721 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.091476 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.173462 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.199092 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.208230 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.225584 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.233579 4723 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.248660 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.357354 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.368111 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.383600 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.393641 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.419884 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.599044 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.620155 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.702985 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.719198 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.735328 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.759615 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.760310 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.800856 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.851728 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 15:26:51 crc kubenswrapper[4723]: I1211 15:26:51.957367 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.090785 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.163764 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 15:26:52 crc kubenswrapper[4723]: 
I1211 15:26:52.168564 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.220615 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.266036 4723 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.301297 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.327931 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.411876 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.428146 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.533769 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.583696 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.584829 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.589478 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.656148 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.685554 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.687430 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.690336 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.720654 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.791910 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.821520 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 15:26:52 crc kubenswrapper[4723]: I1211 15:26:52.845826 4723 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.020341 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.153038 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.164840 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.204995 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.420770 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.464037 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.634681 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.671206 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.817865 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.922244 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 15:26:53 crc kubenswrapper[4723]: I1211 15:26:53.967343 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.003094 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.040493 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.046169 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.167213 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.170139 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.246462 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.272631 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.370887 4723 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.388340 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.464861 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.476293 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.488539 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.491623 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.498167 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.712452 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.729627 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.735695 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.756899 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.772441 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.773591 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.780093 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.829430 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.838188 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.931687 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 15:26:54 crc kubenswrapper[4723]: I1211 15:26:54.952845 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.062471 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.077011 4723 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"serving-cert" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.086617 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.157472 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.168022 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.191028 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.317023 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.380025 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.423502 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.607833 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.629892 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.640934 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.688723 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.716239 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.769913 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.875873 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 15:26:55 crc kubenswrapper[4723]: I1211 15:26:55.949250 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.050292 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.139630 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.210639 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.411452 4723 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.458079 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.498676 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.515323 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.626525 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.723033 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.751419 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.805943 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.861702 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.925236 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.975080 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 15:26:56 crc kubenswrapper[4723]: I1211 15:26:56.978295 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.019769 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.072465 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.141468 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.279824 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.340996 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.367031 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.370293 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.380459 4723 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.421319 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.471048 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.479162 4723 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.479959 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-px8w4" podStartSLOduration=30.618940341 podStartE2EDuration="3m2.479936587s" podCreationTimestamp="2025-12-11 15:23:55 +0000 UTC" firstStartedPulling="2025-12-11 15:23:59.806918655 +0000 UTC m=+50.581152090" lastFinishedPulling="2025-12-11 15:26:31.667914901 +0000 UTC m=+202.442148336" observedRunningTime="2025-12-11 15:26:36.298215849 +0000 UTC m=+207.072449304" watchObservedRunningTime="2025-12-11 15:26:57.479936587 +0000 UTC m=+228.254170032" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.489890 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.502987 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=42.502950763 podStartE2EDuration="42.502950763s" podCreationTimestamp="2025-12-11 15:26:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:26:36.220948467 +0000 UTC m=+206.995181902" watchObservedRunningTime="2025-12-11 15:26:57.502950763 +0000 UTC m=+228.277184188" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.503445 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.503545 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gb8rc","openshift-marketplace/redhat-operators-xtk8c","openshift-authentication/oauth-openshift-558db77b4-fr92q","openshift-marketplace/redhat-marketplace-nfg4x","openshift-marketplace/certified-operators-dsn9s","openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.503602 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.503854 4723 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.503875 4723 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d878d079-6be7-4e08-8d65-1795bb539c61" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.510344 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.526610 4723 reflector.go:368] Caches populated for *v1.Secret from 
object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.542254 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=20.542239152 podStartE2EDuration="20.542239152s" podCreationTimestamp="2025-12-11 15:26:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:26:57.523074806 +0000 UTC m=+228.297308241" watchObservedRunningTime="2025-12-11 15:26:57.542239152 +0000 UTC m=+228.316472587" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.569447 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0008032b-faa4-4c7a-87ea-5ede94bc0229" path="/var/lib/kubelet/pods/0008032b-faa4-4c7a-87ea-5ede94bc0229/volumes" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.570142 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" path="/var/lib/kubelet/pods/075c6779-3a8e-480d-9d98-27bb4728e95a/volumes" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.570808 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" path="/var/lib/kubelet/pods/255ec090-7fde-4cd0-b318-4689a8d9ea0b/volumes" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.571823 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" path="/var/lib/kubelet/pods/28807a39-7a71-4a91-8e2d-586ce6a0c451/volumes" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.573392 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" path="/var/lib/kubelet/pods/55e2838f-2f65-426a-aa56-0ec318cee927/volumes" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.654047 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.669566 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.707566 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.713622 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.801384 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.838088 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.867806 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.904609 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.923579 4723 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.976883 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.989868 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45"] Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990124 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990141 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990154 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990162 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990171 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990178 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990189 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990196 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990203 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0008032b-faa4-4c7a-87ea-5ede94bc0229" containerName="oauth-openshift" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990210 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="0008032b-faa4-4c7a-87ea-5ede94bc0229" containerName="oauth-openshift" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990218 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990224 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990232 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990238 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990246 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990252 4723 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990259 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990266 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990277 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990284 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990296 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990303 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="extract-utilities" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990312 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990318 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="extract-content" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990327 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990333 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: E1211 15:26:57.990340 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" containerName="installer" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990347 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" containerName="installer" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990433 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaa5fc4f-966f-4bfa-9801-8de33c422283" containerName="installer" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990444 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="55e2838f-2f65-426a-aa56-0ec318cee927" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990452 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="28807a39-7a71-4a91-8e2d-586ce6a0c451" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990458 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="0008032b-faa4-4c7a-87ea-5ede94bc0229" containerName="oauth-openshift" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990467 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="255ec090-7fde-4cd0-b318-4689a8d9ea0b" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990474 4723 
memory_manager.go:354] "RemoveStaleState removing state" podUID="075c6779-3a8e-480d-9d98-27bb4728e95a" containerName="registry-server" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.990811 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.993725 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.994354 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.994638 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.994637 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.994869 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.995176 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.995303 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 15:26:57 crc kubenswrapper[4723]: I1211 15:26:57.995859 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.000522 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.003295 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.005307 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.005479 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.010751 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.012215 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.019065 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45"] Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.020886 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.055026 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 11 
15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153262 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-session\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153310 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-audit-policies\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153336 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-login\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153361 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153378 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153393 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153422 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-service-ca\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153601 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr47q\" (UniqueName: \"kubernetes.io/projected/3146cc93-41cb-4a34-90a0-3df49138ae7d-kube-api-access-dr47q\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" 
(UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153661 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-router-certs\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153688 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-error\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153730 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3146cc93-41cb-4a34-90a0-3df49138ae7d-audit-dir\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153750 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153802 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.153823 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.171499 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.225646 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.254961 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-service-ca\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: 
\"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255044 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr47q\" (UniqueName: \"kubernetes.io/projected/3146cc93-41cb-4a34-90a0-3df49138ae7d-kube-api-access-dr47q\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255075 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-router-certs\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255097 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-error\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255127 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3146cc93-41cb-4a34-90a0-3df49138ae7d-audit-dir\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255145 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255170 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255188 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255214 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-session\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " 
pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255231 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-audit-policies\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255250 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-login\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255259 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3146cc93-41cb-4a34-90a0-3df49138ae7d-audit-dir\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255269 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255535 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255577 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255811 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-service-ca\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.255941 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 
15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.256794 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-audit-policies\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.257163 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.261100 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.261129 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-router-certs\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.261369 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-error\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.261625 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.261900 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-session\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.263567 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-login\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.266540 4723 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.274690 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr47q\" (UniqueName: \"kubernetes.io/projected/3146cc93-41cb-4a34-90a0-3df49138ae7d-kube-api-access-dr47q\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.280903 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3146cc93-41cb-4a34-90a0-3df49138ae7d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5d4f55d7c5-xgp45\" (UID: \"3146cc93-41cb-4a34-90a0-3df49138ae7d\") " pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.307293 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.320469 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.385721 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.409992 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.441188 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.515373 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45"] Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.596161 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.642761 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.682324 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" event={"ID":"3146cc93-41cb-4a34-90a0-3df49138ae7d","Type":"ContainerStarted","Data":"af56f2b24c81129f48999f3fe52f0e805ee1214b5f4f16377ab851cca09bc7d7"} Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.705369 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.705509 4723 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.743304 4723 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.782051 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.922177 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 15:26:58 crc kubenswrapper[4723]: I1211 15:26:58.936543 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.203684 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.219863 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.254460 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.276217 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.499174 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.508518 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.562598 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.672954 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.739926 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.961501 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.972574 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 15:26:59 crc kubenswrapper[4723]: I1211 15:26:59.977405 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.064319 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.165082 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.184492 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.186649 4723 kubelet.go:2431] "SyncLoop REMOVE" 
source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.186889 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://742e22afe89b192b743c3dd6048235104c0ca2aaa4db9cfbf69b07f54f01ca64" gracePeriod=5 Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.329474 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.368147 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.371181 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.530477 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 15:27:00 crc kubenswrapper[4723]: I1211 15:27:00.917584 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.104670 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.195931 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.275410 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.292671 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.454801 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.555663 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.604500 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.697932 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" event={"ID":"3146cc93-41cb-4a34-90a0-3df49138ae7d","Type":"ContainerStarted","Data":"aa0af85965fbbe18616094cb5407cb08c290ade1ef6cbad8e369929fb8ea99ed"} Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.746136 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.766647 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.825862 4723 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 11 15:27:01 crc kubenswrapper[4723]: I1211 15:27:01.931256 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 15:27:02 crc kubenswrapper[4723]: I1211 15:27:02.019263 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 15:27:02 crc kubenswrapper[4723]: I1211 15:27:02.176843 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 11 15:27:02 crc kubenswrapper[4723]: I1211 15:27:02.335016 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 15:27:02 crc kubenswrapper[4723]: I1211 15:27:02.363124 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 15:27:02 crc kubenswrapper[4723]: I1211 15:27:02.415255 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 15:27:02 crc kubenswrapper[4723]: I1211 15:27:02.646488 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 15:27:02 crc kubenswrapper[4723]: I1211 15:27:02.709482 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 15:27:03 crc kubenswrapper[4723]: I1211 15:27:03.437378 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 15:27:03 crc kubenswrapper[4723]: I1211 15:27:03.603589 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 15:27:03 crc kubenswrapper[4723]: I1211 15:27:03.709367 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:27:03 crc kubenswrapper[4723]: I1211 15:27:03.715052 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" Dec 11 15:27:03 crc kubenswrapper[4723]: I1211 15:27:03.745888 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5d4f55d7c5-xgp45" podStartSLOduration=7.745870414 podStartE2EDuration="7.745870414s" podCreationTimestamp="2025-12-11 15:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:03.74189646 +0000 UTC m=+234.516129905" watchObservedRunningTime="2025-12-11 15:27:03.745870414 +0000 UTC m=+234.520103869" Dec 11 15:27:04 crc kubenswrapper[4723]: I1211 15:27:04.165257 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 15:27:04 crc kubenswrapper[4723]: I1211 15:27:04.829052 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 15:27:06 crc kubenswrapper[4723]: I1211 15:27:06.726737 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 15:27:06 crc 
kubenswrapper[4723]: I1211 15:27:06.727105 4723 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="742e22afe89b192b743c3dd6048235104c0ca2aaa4db9cfbf69b07f54f01ca64" exitCode=137 Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.136024 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.136106 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259185 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259258 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259309 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259350 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259375 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259400 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259440 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259446 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259467 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259623 4723 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259637 4723 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259646 4723 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.259653 4723 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.269726 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.361065 4723 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.557101 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.557347 4723 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.566262 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.566327 4723 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="f8f0a8f8-fb8e-4da5-9073-895dbb6c3512" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.570283 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.570323 4723 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="f8f0a8f8-fb8e-4da5-9073-895dbb6c3512" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.734627 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.735501 4723 scope.go:117] "RemoveContainer" containerID="742e22afe89b192b743c3dd6048235104c0ca2aaa4db9cfbf69b07f54f01ca64" Dec 11 15:27:07 crc kubenswrapper[4723]: I1211 15:27:07.735643 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 15:27:15 crc kubenswrapper[4723]: I1211 15:27:15.489854 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7htrb"] Dec 11 15:27:15 crc kubenswrapper[4723]: I1211 15:27:15.491786 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" podUID="85180921-c4ba-4b06-9240-4d35a5c57248" containerName="controller-manager" containerID="cri-o://b35bdad23c2ce8e53c5f7904e91da19209d6bc365c2f4998b4db02967b233a64" gracePeriod=30 Dec 11 15:27:15 crc kubenswrapper[4723]: I1211 15:27:15.590759 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg"] Dec 11 15:27:15 crc kubenswrapper[4723]: I1211 15:27:15.591045 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" podUID="3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" containerName="route-controller-manager" containerID="cri-o://ce5d5ea832f9f9f280551085324bd397b22c8c3842523318fd55eb965fcb2e09" gracePeriod=30 Dec 11 15:27:16 crc kubenswrapper[4723]: I1211 15:27:16.787561 4723 generic.go:334] "Generic (PLEG): container finished" podID="3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" containerID="ce5d5ea832f9f9f280551085324bd397b22c8c3842523318fd55eb965fcb2e09" exitCode=0 Dec 11 15:27:16 crc kubenswrapper[4723]: I1211 15:27:16.788030 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" event={"ID":"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c","Type":"ContainerDied","Data":"ce5d5ea832f9f9f280551085324bd397b22c8c3842523318fd55eb965fcb2e09"} Dec 11 15:27:16 crc kubenswrapper[4723]: I1211 15:27:16.789942 4723 generic.go:334] "Generic (PLEG): container finished" podID="85180921-c4ba-4b06-9240-4d35a5c57248" containerID="b35bdad23c2ce8e53c5f7904e91da19209d6bc365c2f4998b4db02967b233a64" exitCode=0 Dec 11 15:27:16 crc kubenswrapper[4723]: I1211 15:27:16.790023 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" event={"ID":"85180921-c4ba-4b06-9240-4d35a5c57248","Type":"ContainerDied","Data":"b35bdad23c2ce8e53c5f7904e91da19209d6bc365c2f4998b4db02967b233a64"} Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.583913 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.611717 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-79b754f979-kcqq7"] Dec 11 15:27:17 crc kubenswrapper[4723]: E1211 15:27:17.611954 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85180921-c4ba-4b06-9240-4d35a5c57248" containerName="controller-manager" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.618027 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="85180921-c4ba-4b06-9240-4d35a5c57248" containerName="controller-manager" Dec 11 15:27:17 crc kubenswrapper[4723]: E1211 15:27:17.618217 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.618229 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.618427 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.618454 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="85180921-c4ba-4b06-9240-4d35a5c57248" containerName="controller-manager" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.618882 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.620776 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79b754f979-kcqq7"] Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.645459 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.687218 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-proxy-ca-bundles\") pod \"85180921-c4ba-4b06-9240-4d35a5c57248\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.687285 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-config\") pod \"85180921-c4ba-4b06-9240-4d35a5c57248\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.687365 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh5mc\" (UniqueName: \"kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc\") pod \"85180921-c4ba-4b06-9240-4d35a5c57248\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.687419 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-client-ca\") pod \"85180921-c4ba-4b06-9240-4d35a5c57248\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.687450 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert\") pod \"85180921-c4ba-4b06-9240-4d35a5c57248\" (UID: \"85180921-c4ba-4b06-9240-4d35a5c57248\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.688500 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-client-ca" (OuterVolumeSpecName: "client-ca") pod "85180921-c4ba-4b06-9240-4d35a5c57248" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.688691 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "85180921-c4ba-4b06-9240-4d35a5c57248" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.688724 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-config" (OuterVolumeSpecName: "config") pod "85180921-c4ba-4b06-9240-4d35a5c57248" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.696669 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc" (OuterVolumeSpecName: "kube-api-access-wh5mc") pod "85180921-c4ba-4b06-9240-4d35a5c57248" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248"). 
InnerVolumeSpecName "kube-api-access-wh5mc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.697046 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "85180921-c4ba-4b06-9240-4d35a5c57248" (UID: "85180921-c4ba-4b06-9240-4d35a5c57248"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.788454 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45vwg\" (UniqueName: \"kubernetes.io/projected/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-kube-api-access-45vwg\") pod \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.788563 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-client-ca\") pod \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.788615 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-serving-cert\") pod \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.788715 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-config\") pod \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\" (UID: \"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c\") " Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.789432 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-client-ca" (OuterVolumeSpecName: "client-ca") pod "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" (UID: "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.789547 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-config" (OuterVolumeSpecName: "config") pod "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" (UID: "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.789678 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-client-ca\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.789709 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-proxy-ca-bundles\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.789733 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e17446de-b1eb-4b48-8b90-e5446a61c197-serving-cert\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.789951 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l77xd\" (UniqueName: \"kubernetes.io/projected/e17446de-b1eb-4b48-8b90-e5446a61c197-kube-api-access-l77xd\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790020 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-config\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790112 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790131 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85180921-c4ba-4b06-9240-4d35a5c57248-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790140 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790151 4723 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790162 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85180921-c4ba-4b06-9240-4d35a5c57248-config\") on node \"crc\" DevicePath \"\"" Dec 11 
15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790171 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.790181 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh5mc\" (UniqueName: \"kubernetes.io/projected/85180921-c4ba-4b06-9240-4d35a5c57248-kube-api-access-wh5mc\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.791757 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" (UID: "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.792243 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-kube-api-access-45vwg" (OuterVolumeSpecName: "kube-api-access-45vwg") pod "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" (UID: "3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c"). InnerVolumeSpecName "kube-api-access-45vwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.798147 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.798247 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7htrb" event={"ID":"85180921-c4ba-4b06-9240-4d35a5c57248","Type":"ContainerDied","Data":"b4f25a96fc42042eb81217e4e4ce3f7ec97d5e93102b4d2b22be94e890f97f39"} Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.798335 4723 scope.go:117] "RemoveContainer" containerID="b35bdad23c2ce8e53c5f7904e91da19209d6bc365c2f4998b4db02967b233a64" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.800131 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" event={"ID":"3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c","Type":"ContainerDied","Data":"27777eee80fb09901002a969b7597c369f88bbf7c5267e04950155ee99cec5d2"} Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.800188 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.816151 4723 scope.go:117] "RemoveContainer" containerID="ce5d5ea832f9f9f280551085324bd397b22c8c3842523318fd55eb965fcb2e09" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.838575 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg"] Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.844304 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p7nsg"] Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.848825 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7htrb"] Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.852484 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7htrb"] Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.891792 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e17446de-b1eb-4b48-8b90-e5446a61c197-serving-cert\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.891886 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l77xd\" (UniqueName: \"kubernetes.io/projected/e17446de-b1eb-4b48-8b90-e5446a61c197-kube-api-access-l77xd\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.891912 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-config\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.891948 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-client-ca\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.892015 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-proxy-ca-bundles\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.892052 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45vwg\" (UniqueName: \"kubernetes.io/projected/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-kube-api-access-45vwg\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.892084 4723 reconciler_common.go:293] "Volume 
detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.893575 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-config\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.893695 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-client-ca\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.893896 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-proxy-ca-bundles\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.896005 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e17446de-b1eb-4b48-8b90-e5446a61c197-serving-cert\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.908142 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l77xd\" (UniqueName: \"kubernetes.io/projected/e17446de-b1eb-4b48-8b90-e5446a61c197-kube-api-access-l77xd\") pod \"controller-manager-79b754f979-kcqq7\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:17 crc kubenswrapper[4723]: I1211 15:27:17.954502 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.159907 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79b754f979-kcqq7"] Dec 11 15:27:18 crc kubenswrapper[4723]: W1211 15:27:18.168075 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode17446de_b1eb_4b48_8b90_e5446a61c197.slice/crio-aaec0d261aef5b3fd96af15c959190b9c7fe95f695082b0c0c3a7cb2c8444c70 WatchSource:0}: Error finding container aaec0d261aef5b3fd96af15c959190b9c7fe95f695082b0c0c3a7cb2c8444c70: Status 404 returned error can't find the container with id aaec0d261aef5b3fd96af15c959190b9c7fe95f695082b0c0c3a7cb2c8444c70 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.305370 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wk4hd"] Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.305669 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wk4hd" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="registry-server" containerID="cri-o://c12b6a7cbf639d5c261eaaae4736d49be91d67b01ee69fc9af6cb7f40841b4d3" gracePeriod=30 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.313598 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hqpzz"] Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.313835 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hqpzz" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="registry-server" containerID="cri-o://6e745fcdd000061f4789b73f64536e6b57890dc1e471da0f6dd7658ef664f6c1" gracePeriod=30 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.323369 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-565wv"] Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.325871 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" containerID="cri-o://9502810f0261b67508e3de4d728a242f3f82516c679a6d4bde46cb56f2558183" gracePeriod=30 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.348114 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-px8w4"] Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.348394 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-px8w4" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="registry-server" containerID="cri-o://c0b862e3701beca77ff1258a7ba9788f7c471ef9ce2b2eb40f263c889268e28c" gracePeriod=30 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.365091 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pkdkv"] Dec 11 15:27:18 crc kubenswrapper[4723]: E1211 15:27:18.369382 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" containerName="route-controller-manager" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.369407 4723 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" containerName="route-controller-manager" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.369495 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" containerName="route-controller-manager" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.370071 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.373868 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pkdkv"] Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.376899 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8xn7k"] Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.377130 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8xn7k" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="registry-server" containerID="cri-o://921102dd93e77e4004ea599ae48926db8e3e99aded68fcdb83f78fbefbeb3429" gracePeriod=30 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.498419 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4d28cf96-8fcf-4934-96bb-36f0482583aa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.498511 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9lqh\" (UniqueName: \"kubernetes.io/projected/4d28cf96-8fcf-4934-96bb-36f0482583aa-kube-api-access-j9lqh\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.498569 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d28cf96-8fcf-4934-96bb-36f0482583aa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.599534 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4d28cf96-8fcf-4934-96bb-36f0482583aa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.599615 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9lqh\" (UniqueName: \"kubernetes.io/projected/4d28cf96-8fcf-4934-96bb-36f0482583aa-kube-api-access-j9lqh\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.599663 4723 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d28cf96-8fcf-4934-96bb-36f0482583aa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.601142 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4d28cf96-8fcf-4934-96bb-36f0482583aa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.607038 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4d28cf96-8fcf-4934-96bb-36f0482583aa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.618244 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9lqh\" (UniqueName: \"kubernetes.io/projected/4d28cf96-8fcf-4934-96bb-36f0482583aa-kube-api-access-j9lqh\") pod \"marketplace-operator-79b997595-pkdkv\" (UID: \"4d28cf96-8fcf-4934-96bb-36f0482583aa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.690323 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.809452 4723 generic.go:334] "Generic (PLEG): container finished" podID="164595ee-6652-4559-b8dd-7e040aa4602d" containerID="6e745fcdd000061f4789b73f64536e6b57890dc1e471da0f6dd7658ef664f6c1" exitCode=0 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.809518 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpzz" event={"ID":"164595ee-6652-4559-b8dd-7e040aa4602d","Type":"ContainerDied","Data":"6e745fcdd000061f4789b73f64536e6b57890dc1e471da0f6dd7658ef664f6c1"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.810726 4723 generic.go:334] "Generic (PLEG): container finished" podID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerID="9502810f0261b67508e3de4d728a242f3f82516c679a6d4bde46cb56f2558183" exitCode=0 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.810792 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" event={"ID":"a7b47ae1-3d79-42e6-b55a-4021723e74d5","Type":"ContainerDied","Data":"9502810f0261b67508e3de4d728a242f3f82516c679a6d4bde46cb56f2558183"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.814994 4723 generic.go:334] "Generic (PLEG): container finished" podID="52430e74-4768-47fa-966f-09160199d877" containerID="921102dd93e77e4004ea599ae48926db8e3e99aded68fcdb83f78fbefbeb3429" exitCode=0 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.815040 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8xn7k" 
event={"ID":"52430e74-4768-47fa-966f-09160199d877","Type":"ContainerDied","Data":"921102dd93e77e4004ea599ae48926db8e3e99aded68fcdb83f78fbefbeb3429"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.815058 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8xn7k" event={"ID":"52430e74-4768-47fa-966f-09160199d877","Type":"ContainerDied","Data":"96b3cc2441963f050436ec515ffa74bade2b249c91508228f968d0c3ea3c71fa"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.815072 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96b3cc2441963f050436ec515ffa74bade2b249c91508228f968d0c3ea3c71fa" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.818234 4723 generic.go:334] "Generic (PLEG): container finished" podID="812543bf-43e0-49c1-8348-890db4be7090" containerID="c0b862e3701beca77ff1258a7ba9788f7c471ef9ce2b2eb40f263c889268e28c" exitCode=0 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.818274 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px8w4" event={"ID":"812543bf-43e0-49c1-8348-890db4be7090","Type":"ContainerDied","Data":"c0b862e3701beca77ff1258a7ba9788f7c471ef9ce2b2eb40f263c889268e28c"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.818288 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-px8w4" event={"ID":"812543bf-43e0-49c1-8348-890db4be7090","Type":"ContainerDied","Data":"5bdbf067376bf5d66153e7834f5d1bd15d8bdd98030abf8d3b127991523bf6c3"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.818297 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bdbf067376bf5d66153e7834f5d1bd15d8bdd98030abf8d3b127991523bf6c3" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.825363 4723 generic.go:334] "Generic (PLEG): container finished" podID="935f64e7-557b-4244-b69e-b0943965db19" containerID="c12b6a7cbf639d5c261eaaae4736d49be91d67b01ee69fc9af6cb7f40841b4d3" exitCode=0 Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.825415 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wk4hd" event={"ID":"935f64e7-557b-4244-b69e-b0943965db19","Type":"ContainerDied","Data":"c12b6a7cbf639d5c261eaaae4736d49be91d67b01ee69fc9af6cb7f40841b4d3"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.827448 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" event={"ID":"e17446de-b1eb-4b48-8b90-e5446a61c197","Type":"ContainerStarted","Data":"148c13bbfe73bc1b2a52bd7f48059dd9fb8ad84d1d8528008698c26934fec621"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.827479 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" event={"ID":"e17446de-b1eb-4b48-8b90-e5446a61c197","Type":"ContainerStarted","Data":"aaec0d261aef5b3fd96af15c959190b9c7fe95f695082b0c0c3a7cb2c8444c70"} Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.828697 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.840920 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.851778 4723 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.853177 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.854302 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" podStartSLOduration=3.854293624 podStartE2EDuration="3.854293624s" podCreationTimestamp="2025-12-11 15:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:18.852079131 +0000 UTC m=+249.626312586" watchObservedRunningTime="2025-12-11 15:27:18.854293624 +0000 UTC m=+249.628527059" Dec 11 15:27:18 crc kubenswrapper[4723]: I1211 15:27:18.960771 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pkdkv"] Dec 11 15:27:18 crc kubenswrapper[4723]: W1211 15:27:18.985248 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d28cf96_8fcf_4934_96bb_36f0482583aa.slice/crio-d86a0e0f95727e1079db6e0ff5eb70bcc734303674533ae4d9699c3f33c72d9a WatchSource:0}: Error finding container d86a0e0f95727e1079db6e0ff5eb70bcc734303674533ae4d9699c3f33c72d9a: Status 404 returned error can't find the container with id d86a0e0f95727e1079db6e0ff5eb70bcc734303674533ae4d9699c3f33c72d9a Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.005326 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-utilities\") pod \"812543bf-43e0-49c1-8348-890db4be7090\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.005382 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv9gb\" (UniqueName: \"kubernetes.io/projected/52430e74-4768-47fa-966f-09160199d877-kube-api-access-kv9gb\") pod \"52430e74-4768-47fa-966f-09160199d877\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.005412 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-catalog-content\") pod \"52430e74-4768-47fa-966f-09160199d877\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.005439 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-utilities\") pod \"52430e74-4768-47fa-966f-09160199d877\" (UID: \"52430e74-4768-47fa-966f-09160199d877\") " Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.005469 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-catalog-content\") pod \"812543bf-43e0-49c1-8348-890db4be7090\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.005489 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-ghcnp\" (UniqueName: \"kubernetes.io/projected/812543bf-43e0-49c1-8348-890db4be7090-kube-api-access-ghcnp\") pod \"812543bf-43e0-49c1-8348-890db4be7090\" (UID: \"812543bf-43e0-49c1-8348-890db4be7090\") " Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.007297 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-utilities" (OuterVolumeSpecName: "utilities") pod "812543bf-43e0-49c1-8348-890db4be7090" (UID: "812543bf-43e0-49c1-8348-890db4be7090"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.008614 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-utilities" (OuterVolumeSpecName: "utilities") pod "52430e74-4768-47fa-966f-09160199d877" (UID: "52430e74-4768-47fa-966f-09160199d877"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.013680 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52430e74-4768-47fa-966f-09160199d877-kube-api-access-kv9gb" (OuterVolumeSpecName: "kube-api-access-kv9gb") pod "52430e74-4768-47fa-966f-09160199d877" (UID: "52430e74-4768-47fa-966f-09160199d877"). InnerVolumeSpecName "kube-api-access-kv9gb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.018567 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/812543bf-43e0-49c1-8348-890db4be7090-kube-api-access-ghcnp" (OuterVolumeSpecName: "kube-api-access-ghcnp") pod "812543bf-43e0-49c1-8348-890db4be7090" (UID: "812543bf-43e0-49c1-8348-890db4be7090"). InnerVolumeSpecName "kube-api-access-ghcnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.032791 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "812543bf-43e0-49c1-8348-890db4be7090" (UID: "812543bf-43e0-49c1-8348-890db4be7090"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.106826 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.106876 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv9gb\" (UniqueName: \"kubernetes.io/projected/52430e74-4768-47fa-966f-09160199d877-kube-api-access-kv9gb\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.106888 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.106899 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/812543bf-43e0-49c1-8348-890db4be7090-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.106911 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghcnp\" (UniqueName: \"kubernetes.io/projected/812543bf-43e0-49c1-8348-890db4be7090-kube-api-access-ghcnp\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.151878 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52430e74-4768-47fa-966f-09160199d877" (UID: "52430e74-4768-47fa-966f-09160199d877"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.208261 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52430e74-4768-47fa-966f-09160199d877-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.555564 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c" path="/var/lib/kubelet/pods/3b1cbd48-3b8f-4da4-ace3-af94ff1cd03c/volumes" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.556140 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85180921-c4ba-4b06-9240-4d35a5c57248" path="/var/lib/kubelet/pods/85180921-c4ba-4b06-9240-4d35a5c57248/volumes" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.836004 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" event={"ID":"4d28cf96-8fcf-4934-96bb-36f0482583aa","Type":"ContainerStarted","Data":"d86a0e0f95727e1079db6e0ff5eb70bcc734303674533ae4d9699c3f33c72d9a"} Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.836067 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-px8w4" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.836031 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8xn7k" Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.854510 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8xn7k"] Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.860066 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8xn7k"] Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.868436 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-px8w4"] Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.871899 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-px8w4"] Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.976035 4723 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-565wv container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Dec 11 15:27:19 crc kubenswrapper[4723]: I1211 15:27:19.976095 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.12:8080/healthz\": dial tcp 10.217.0.12:8080: connect: connection refused" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.005866 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl"] Dec 11 15:27:20 crc kubenswrapper[4723]: E1211 15:27:20.006311 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="extract-content" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.006333 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="extract-content" Dec 11 15:27:20 crc kubenswrapper[4723]: E1211 15:27:20.006346 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="registry-server" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.006353 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="registry-server" Dec 11 15:27:20 crc kubenswrapper[4723]: E1211 15:27:20.006361 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="extract-utilities" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.006370 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="extract-utilities" Dec 11 15:27:20 crc kubenswrapper[4723]: E1211 15:27:20.006379 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="extract-utilities" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.006385 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="extract-utilities" Dec 11 15:27:20 crc kubenswrapper[4723]: E1211 15:27:20.006398 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="extract-content" Dec 11 15:27:20 crc kubenswrapper[4723]: 
I1211 15:27:20.006404 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="extract-content" Dec 11 15:27:20 crc kubenswrapper[4723]: E1211 15:27:20.006414 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="registry-server" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.006423 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="registry-server" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.006539 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="52430e74-4768-47fa-966f-09160199d877" containerName="registry-server" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.006558 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="812543bf-43e0-49c1-8348-890db4be7090" containerName="registry-server" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.007201 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.011580 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.011890 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.012275 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.012428 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.012619 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.012909 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.012904 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl"] Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.020481 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-config\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.020641 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kswpn\" (UniqueName: \"kubernetes.io/projected/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-kube-api-access-kswpn\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.020774 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-serving-cert\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.020810 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-client-ca\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.122673 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kswpn\" (UniqueName: \"kubernetes.io/projected/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-kube-api-access-kswpn\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.122797 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-serving-cert\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.122836 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-client-ca\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.122872 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-config\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.125264 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-config\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.127328 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-client-ca\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.141818 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kswpn\" 
(UniqueName: \"kubernetes.io/projected/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-kube-api-access-kswpn\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.149191 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-serving-cert\") pod \"route-controller-manager-6465fcd9b9-qhcdl\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.326868 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.406943 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.425355 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-catalog-content\") pod \"164595ee-6652-4559-b8dd-7e040aa4602d\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.425418 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zq4hn\" (UniqueName: \"kubernetes.io/projected/164595ee-6652-4559-b8dd-7e040aa4602d-kube-api-access-zq4hn\") pod \"164595ee-6652-4559-b8dd-7e040aa4602d\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.425440 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-utilities\") pod \"164595ee-6652-4559-b8dd-7e040aa4602d\" (UID: \"164595ee-6652-4559-b8dd-7e040aa4602d\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.426703 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-utilities" (OuterVolumeSpecName: "utilities") pod "164595ee-6652-4559-b8dd-7e040aa4602d" (UID: "164595ee-6652-4559-b8dd-7e040aa4602d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.426881 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.440586 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/164595ee-6652-4559-b8dd-7e040aa4602d-kube-api-access-zq4hn" (OuterVolumeSpecName: "kube-api-access-zq4hn") pod "164595ee-6652-4559-b8dd-7e040aa4602d" (UID: "164595ee-6652-4559-b8dd-7e040aa4602d"). InnerVolumeSpecName "kube-api-access-zq4hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.495469 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.535814 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "164595ee-6652-4559-b8dd-7e040aa4602d" (UID: "164595ee-6652-4559-b8dd-7e040aa4602d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.537288 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.537405 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-catalog-content\") pod \"935f64e7-557b-4244-b69e-b0943965db19\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.537477 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-utilities\") pod \"935f64e7-557b-4244-b69e-b0943965db19\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.537537 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hmzt\" (UniqueName: \"kubernetes.io/projected/935f64e7-557b-4244-b69e-b0943965db19-kube-api-access-6hmzt\") pod \"935f64e7-557b-4244-b69e-b0943965db19\" (UID: \"935f64e7-557b-4244-b69e-b0943965db19\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.537789 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/164595ee-6652-4559-b8dd-7e040aa4602d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.537802 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zq4hn\" (UniqueName: \"kubernetes.io/projected/164595ee-6652-4559-b8dd-7e040aa4602d-kube-api-access-zq4hn\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.562679 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-utilities" (OuterVolumeSpecName: "utilities") pod "935f64e7-557b-4244-b69e-b0943965db19" (UID: "935f64e7-557b-4244-b69e-b0943965db19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.565125 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/935f64e7-557b-4244-b69e-b0943965db19-kube-api-access-6hmzt" (OuterVolumeSpecName: "kube-api-access-6hmzt") pod "935f64e7-557b-4244-b69e-b0943965db19" (UID: "935f64e7-557b-4244-b69e-b0943965db19"). InnerVolumeSpecName "kube-api-access-6hmzt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.638420 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbchh\" (UniqueName: \"kubernetes.io/projected/a7b47ae1-3d79-42e6-b55a-4021723e74d5-kube-api-access-bbchh\") pod \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.638787 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-trusted-ca\") pod \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.638838 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-operator-metrics\") pod \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\" (UID: \"a7b47ae1-3d79-42e6-b55a-4021723e74d5\") " Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.639365 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.639385 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hmzt\" (UniqueName: \"kubernetes.io/projected/935f64e7-557b-4244-b69e-b0943965db19-kube-api-access-6hmzt\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.639810 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a7b47ae1-3d79-42e6-b55a-4021723e74d5" (UID: "a7b47ae1-3d79-42e6-b55a-4021723e74d5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.644851 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a7b47ae1-3d79-42e6-b55a-4021723e74d5" (UID: "a7b47ae1-3d79-42e6-b55a-4021723e74d5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.645228 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7b47ae1-3d79-42e6-b55a-4021723e74d5-kube-api-access-bbchh" (OuterVolumeSpecName: "kube-api-access-bbchh") pod "a7b47ae1-3d79-42e6-b55a-4021723e74d5" (UID: "a7b47ae1-3d79-42e6-b55a-4021723e74d5"). InnerVolumeSpecName "kube-api-access-bbchh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.651499 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "935f64e7-557b-4244-b69e-b0943965db19" (UID: "935f64e7-557b-4244-b69e-b0943965db19"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.739748 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/935f64e7-557b-4244-b69e-b0943965db19-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.739812 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbchh\" (UniqueName: \"kubernetes.io/projected/a7b47ae1-3d79-42e6-b55a-4021723e74d5-kube-api-access-bbchh\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.739826 4723 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.739838 4723 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a7b47ae1-3d79-42e6-b55a-4021723e74d5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.842204 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" event={"ID":"a7b47ae1-3d79-42e6-b55a-4021723e74d5","Type":"ContainerDied","Data":"babeca3dfc87e06f80dfff2044a00af8910f47a5d8eb554a50d75803c0593bac"} Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.842238 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-565wv" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.842258 4723 scope.go:117] "RemoveContainer" containerID="9502810f0261b67508e3de4d728a242f3f82516c679a6d4bde46cb56f2558183" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.844727 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" event={"ID":"4d28cf96-8fcf-4934-96bb-36f0482583aa","Type":"ContainerStarted","Data":"45bf8da8dc9ce9bd56533c31d66cc02dbac9e7d8f6391790b3f636b09cd21c97"} Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.845311 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.847344 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.849144 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wk4hd" event={"ID":"935f64e7-557b-4244-b69e-b0943965db19","Type":"ContainerDied","Data":"859e766eb6fb19adfed656f984c0956b9bd1fccfaf8ba20d6a95818c694c0311"} Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.849307 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wk4hd" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.855906 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqpzz" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.856076 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqpzz" event={"ID":"164595ee-6652-4559-b8dd-7e040aa4602d","Type":"ContainerDied","Data":"762cb9d78eb716d6043f58d446b1cdb03345926d52b5655274bf367eeb0b3ed7"} Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.875566 4723 scope.go:117] "RemoveContainer" containerID="c12b6a7cbf639d5c261eaaae4736d49be91d67b01ee69fc9af6cb7f40841b4d3" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.891534 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pkdkv" podStartSLOduration=2.891495293 podStartE2EDuration="2.891495293s" podCreationTimestamp="2025-12-11 15:27:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:20.874993473 +0000 UTC m=+251.649226908" watchObservedRunningTime="2025-12-11 15:27:20.891495293 +0000 UTC m=+251.665728738" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.894903 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-565wv"] Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.900644 4723 scope.go:117] "RemoveContainer" containerID="f02990612a9f6294981cd4e987e8709b954f06db1c714a9de89af8b7961d3755" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.900778 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl"] Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.903225 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-565wv"] Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.926639 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wk4hd"] Dec 11 15:27:20 crc kubenswrapper[4723]: W1211 15:27:20.929684 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6baefd0_cb1d_4cb9_b856_c5492fb9adcf.slice/crio-69c1dededbd4fc64c1d491a1196be0bf8a60b0946051e2399037350d5b70aee0 WatchSource:0}: Error finding container 69c1dededbd4fc64c1d491a1196be0bf8a60b0946051e2399037350d5b70aee0: Status 404 returned error can't find the container with id 69c1dededbd4fc64c1d491a1196be0bf8a60b0946051e2399037350d5b70aee0 Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.934835 4723 scope.go:117] "RemoveContainer" containerID="5ce8e12a4c08971cf7920bd15d5ff94555d40350377d1be624a09a210d8a9b46" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.937988 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wk4hd"] Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.942066 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hqpzz"] Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.945870 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hqpzz"] Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.957263 4723 scope.go:117] "RemoveContainer" containerID="6e745fcdd000061f4789b73f64536e6b57890dc1e471da0f6dd7658ef664f6c1" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 
15:27:20.976935 4723 scope.go:117] "RemoveContainer" containerID="2ace641654a4040d3ef63eda4e0e0618e31f808e96bd54e2bd4068584179d47c" Dec 11 15:27:20 crc kubenswrapper[4723]: I1211 15:27:20.998383 4723 scope.go:117] "RemoveContainer" containerID="ed710ae72560d25b24958b457f0cfbfe351b1c7e1fa7f08f3823b69c8d214f83" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.555601 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" path="/var/lib/kubelet/pods/164595ee-6652-4559-b8dd-7e040aa4602d/volumes" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.556828 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52430e74-4768-47fa-966f-09160199d877" path="/var/lib/kubelet/pods/52430e74-4768-47fa-966f-09160199d877/volumes" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.557577 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="812543bf-43e0-49c1-8348-890db4be7090" path="/var/lib/kubelet/pods/812543bf-43e0-49c1-8348-890db4be7090/volumes" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.558839 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="935f64e7-557b-4244-b69e-b0943965db19" path="/var/lib/kubelet/pods/935f64e7-557b-4244-b69e-b0943965db19/volumes" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.559569 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" path="/var/lib/kubelet/pods/a7b47ae1-3d79-42e6-b55a-4021723e74d5/volumes" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.863197 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" event={"ID":"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf","Type":"ContainerStarted","Data":"77199561c1d0aed819a5da991b85ecf4829588fd39089a58a61c1108c604ee77"} Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.863248 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" event={"ID":"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf","Type":"ContainerStarted","Data":"69c1dededbd4fc64c1d491a1196be0bf8a60b0946051e2399037350d5b70aee0"} Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.863872 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.868482 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:21 crc kubenswrapper[4723]: I1211 15:27:21.892169 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" podStartSLOduration=6.892150872 podStartE2EDuration="6.892150872s" podCreationTimestamp="2025-12-11 15:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:21.888176819 +0000 UTC m=+252.662410264" watchObservedRunningTime="2025-12-11 15:27:21.892150872 +0000 UTC m=+252.666384317" Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 15:27:28.658551 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-79b754f979-kcqq7"] Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 
15:27:28.659294 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" podUID="e17446de-b1eb-4b48-8b90-e5446a61c197" containerName="controller-manager" containerID="cri-o://148c13bbfe73bc1b2a52bd7f48059dd9fb8ad84d1d8528008698c26934fec621" gracePeriod=30 Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 15:27:28.673326 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl"] Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 15:27:28.673546 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" podUID="a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" containerName="route-controller-manager" containerID="cri-o://77199561c1d0aed819a5da991b85ecf4829588fd39089a58a61c1108c604ee77" gracePeriod=30 Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 15:27:28.910447 4723 generic.go:334] "Generic (PLEG): container finished" podID="e17446de-b1eb-4b48-8b90-e5446a61c197" containerID="148c13bbfe73bc1b2a52bd7f48059dd9fb8ad84d1d8528008698c26934fec621" exitCode=0 Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 15:27:28.910547 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" event={"ID":"e17446de-b1eb-4b48-8b90-e5446a61c197","Type":"ContainerDied","Data":"148c13bbfe73bc1b2a52bd7f48059dd9fb8ad84d1d8528008698c26934fec621"} Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 15:27:28.914037 4723 generic.go:334] "Generic (PLEG): container finished" podID="a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" containerID="77199561c1d0aed819a5da991b85ecf4829588fd39089a58a61c1108c604ee77" exitCode=0 Dec 11 15:27:28 crc kubenswrapper[4723]: I1211 15:27:28.914078 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" event={"ID":"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf","Type":"ContainerDied","Data":"77199561c1d0aed819a5da991b85ecf4829588fd39089a58a61c1108c604ee77"} Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.156013 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.288406 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.353449 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-config\") pod \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.353509 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-client-ca\") pod \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.353546 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-serving-cert\") pod \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.353583 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kswpn\" (UniqueName: \"kubernetes.io/projected/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-kube-api-access-kswpn\") pod \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\" (UID: \"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.353657 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-proxy-ca-bundles\") pod \"e17446de-b1eb-4b48-8b90-e5446a61c197\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.353735 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e17446de-b1eb-4b48-8b90-e5446a61c197-serving-cert\") pod \"e17446de-b1eb-4b48-8b90-e5446a61c197\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.354297 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-client-ca" (OuterVolumeSpecName: "client-ca") pod "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" (UID: "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.354337 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-config" (OuterVolumeSpecName: "config") pod "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" (UID: "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.354890 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e17446de-b1eb-4b48-8b90-e5446a61c197" (UID: "e17446de-b1eb-4b48-8b90-e5446a61c197"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.360915 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" (UID: "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.361044 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-kube-api-access-kswpn" (OuterVolumeSpecName: "kube-api-access-kswpn") pod "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" (UID: "a6baefd0-cb1d-4cb9-b856-c5492fb9adcf"). InnerVolumeSpecName "kube-api-access-kswpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.374489 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e17446de-b1eb-4b48-8b90-e5446a61c197-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e17446de-b1eb-4b48-8b90-e5446a61c197" (UID: "e17446de-b1eb-4b48-8b90-e5446a61c197"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.455347 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l77xd\" (UniqueName: \"kubernetes.io/projected/e17446de-b1eb-4b48-8b90-e5446a61c197-kube-api-access-l77xd\") pod \"e17446de-b1eb-4b48-8b90-e5446a61c197\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.455405 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-config\") pod \"e17446de-b1eb-4b48-8b90-e5446a61c197\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.455578 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-client-ca\") pod \"e17446de-b1eb-4b48-8b90-e5446a61c197\" (UID: \"e17446de-b1eb-4b48-8b90-e5446a61c197\") " Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456319 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-config" (OuterVolumeSpecName: "config") pod "e17446de-b1eb-4b48-8b90-e5446a61c197" (UID: "e17446de-b1eb-4b48-8b90-e5446a61c197"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456369 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-client-ca" (OuterVolumeSpecName: "client-ca") pod "e17446de-b1eb-4b48-8b90-e5446a61c197" (UID: "e17446de-b1eb-4b48-8b90-e5446a61c197"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456445 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kswpn\" (UniqueName: \"kubernetes.io/projected/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-kube-api-access-kswpn\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456466 4723 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456478 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e17446de-b1eb-4b48-8b90-e5446a61c197-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456492 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456501 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.456509 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.458308 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e17446de-b1eb-4b48-8b90-e5446a61c197-kube-api-access-l77xd" (OuterVolumeSpecName: "kube-api-access-l77xd") pod "e17446de-b1eb-4b48-8b90-e5446a61c197" (UID: "e17446de-b1eb-4b48-8b90-e5446a61c197"). InnerVolumeSpecName "kube-api-access-l77xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.557397 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.557426 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l77xd\" (UniqueName: \"kubernetes.io/projected/e17446de-b1eb-4b48-8b90-e5446a61c197-kube-api-access-l77xd\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.557437 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e17446de-b1eb-4b48-8b90-e5446a61c197-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.920304 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" event={"ID":"e17446de-b1eb-4b48-8b90-e5446a61c197","Type":"ContainerDied","Data":"aaec0d261aef5b3fd96af15c959190b9c7fe95f695082b0c0c3a7cb2c8444c70"} Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.920362 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-79b754f979-kcqq7" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.920372 4723 scope.go:117] "RemoveContainer" containerID="148c13bbfe73bc1b2a52bd7f48059dd9fb8ad84d1d8528008698c26934fec621" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.923398 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" event={"ID":"a6baefd0-cb1d-4cb9-b856-c5492fb9adcf","Type":"ContainerDied","Data":"69c1dededbd4fc64c1d491a1196be0bf8a60b0946051e2399037350d5b70aee0"} Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.923498 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.941008 4723 scope.go:117] "RemoveContainer" containerID="77199561c1d0aed819a5da991b85ecf4829588fd39089a58a61c1108c604ee77" Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.941146 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl"] Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.947101 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6465fcd9b9-qhcdl"] Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.950024 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-79b754f979-kcqq7"] Dec 11 15:27:29 crc kubenswrapper[4723]: I1211 15:27:29.956272 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-79b754f979-kcqq7"] Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012031 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-hf5m6"] Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012258 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" containerName="route-controller-manager" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012276 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" containerName="route-controller-manager" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012289 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="registry-server" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012297 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="registry-server" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012311 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="extract-utilities" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012319 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="extract-utilities" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012330 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="extract-utilities" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012337 4723 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="extract-utilities" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012347 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17446de-b1eb-4b48-8b90-e5446a61c197" containerName="controller-manager" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012353 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17446de-b1eb-4b48-8b90-e5446a61c197" containerName="controller-manager" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012363 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="registry-server" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012370 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="registry-server" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012382 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012389 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012399 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="extract-content" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012406 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="extract-content" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.012416 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="extract-content" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012423 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="extract-content" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012524 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="164595ee-6652-4559-b8dd-7e040aa4602d" containerName="registry-server" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012540 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="e17446de-b1eb-4b48-8b90-e5446a61c197" containerName="controller-manager" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012551 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" containerName="route-controller-manager" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012563 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7b47ae1-3d79-42e6-b55a-4021723e74d5" containerName="marketplace-operator" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.012575 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="935f64e7-557b-4244-b69e-b0943965db19" containerName="registry-server" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.013016 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.015147 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk"] Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.015608 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.015715 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.015732 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.015738 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.016074 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.016109 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.018341 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.019266 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.019279 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.019277 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.019513 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.019605 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-hf5m6"] Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.019654 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.022310 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.024408 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk"] Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.027503 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.028477 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-hf5m6"] Dec 11 15:27:30 crc 
kubenswrapper[4723]: I1211 15:27:30.050291 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk"] Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.074409 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[client-ca config kube-api-access-hsjs8 proxy-ca-bundles serving-cert], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" podUID="5bc41811-a217-4e94-a706-49079f735ba5" Dec 11 15:27:30 crc kubenswrapper[4723]: E1211 15:27:30.083624 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[client-ca config kube-api-access-5mrnk serving-cert], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" podUID="5b76277d-d38b-4878-8c7a-585979e4c3a7" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.164446 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-config\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.164766 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-client-ca\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.164801 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-proxy-ca-bundles\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.164819 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-config\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.164904 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-client-ca\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.165001 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bc41811-a217-4e94-a706-49079f735ba5-serving-cert\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " 
pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.165036 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsjs8\" (UniqueName: \"kubernetes.io/projected/5bc41811-a217-4e94-a706-49079f735ba5-kube-api-access-hsjs8\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.165063 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b76277d-d38b-4878-8c7a-585979e4c3a7-serving-cert\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.165101 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mrnk\" (UniqueName: \"kubernetes.io/projected/5b76277d-d38b-4878-8c7a-585979e4c3a7-kube-api-access-5mrnk\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267044 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b76277d-d38b-4878-8c7a-585979e4c3a7-serving-cert\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267170 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mrnk\" (UniqueName: \"kubernetes.io/projected/5b76277d-d38b-4878-8c7a-585979e4c3a7-kube-api-access-5mrnk\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267197 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-config\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267253 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-client-ca\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267412 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-proxy-ca-bundles\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " 
pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267458 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-config\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267480 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-client-ca\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267514 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bc41811-a217-4e94-a706-49079f735ba5-serving-cert\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.267541 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsjs8\" (UniqueName: \"kubernetes.io/projected/5bc41811-a217-4e94-a706-49079f735ba5-kube-api-access-hsjs8\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.268488 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-client-ca\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.268611 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-config\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.268715 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-client-ca\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.268956 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-proxy-ca-bundles\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.268991 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-config\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.274285 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b76277d-d38b-4878-8c7a-585979e4c3a7-serving-cert\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.280756 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bc41811-a217-4e94-a706-49079f735ba5-serving-cert\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.293945 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mrnk\" (UniqueName: \"kubernetes.io/projected/5b76277d-d38b-4878-8c7a-585979e4c3a7-kube-api-access-5mrnk\") pod \"route-controller-manager-5f9c74ccbd-cpggk\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.302708 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsjs8\" (UniqueName: \"kubernetes.io/projected/5bc41811-a217-4e94-a706-49079f735ba5-kube-api-access-hsjs8\") pod \"controller-manager-66698fc56-hf5m6\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.932948 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.933038 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.944277 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:30 crc kubenswrapper[4723]: I1211 15:27:30.950099 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.075861 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bc41811-a217-4e94-a706-49079f735ba5-serving-cert\") pod \"5bc41811-a217-4e94-a706-49079f735ba5\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.075927 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-proxy-ca-bundles\") pod \"5bc41811-a217-4e94-a706-49079f735ba5\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.075988 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mrnk\" (UniqueName: \"kubernetes.io/projected/5b76277d-d38b-4878-8c7a-585979e4c3a7-kube-api-access-5mrnk\") pod \"5b76277d-d38b-4878-8c7a-585979e4c3a7\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.076045 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-config\") pod \"5b76277d-d38b-4878-8c7a-585979e4c3a7\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.076081 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-client-ca\") pod \"5b76277d-d38b-4878-8c7a-585979e4c3a7\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.076119 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-client-ca\") pod \"5bc41811-a217-4e94-a706-49079f735ba5\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.076171 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsjs8\" (UniqueName: \"kubernetes.io/projected/5bc41811-a217-4e94-a706-49079f735ba5-kube-api-access-hsjs8\") pod \"5bc41811-a217-4e94-a706-49079f735ba5\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.076209 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-config\") pod \"5bc41811-a217-4e94-a706-49079f735ba5\" (UID: \"5bc41811-a217-4e94-a706-49079f735ba5\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.076283 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b76277d-d38b-4878-8c7a-585979e4c3a7-serving-cert\") pod \"5b76277d-d38b-4878-8c7a-585979e4c3a7\" (UID: \"5b76277d-d38b-4878-8c7a-585979e4c3a7\") " Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.076643 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod 
"5bc41811-a217-4e94-a706-49079f735ba5" (UID: "5bc41811-a217-4e94-a706-49079f735ba5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.077098 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-client-ca" (OuterVolumeSpecName: "client-ca") pod "5bc41811-a217-4e94-a706-49079f735ba5" (UID: "5bc41811-a217-4e94-a706-49079f735ba5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.077411 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-config" (OuterVolumeSpecName: "config") pod "5b76277d-d38b-4878-8c7a-585979e4c3a7" (UID: "5b76277d-d38b-4878-8c7a-585979e4c3a7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.077433 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-client-ca" (OuterVolumeSpecName: "client-ca") pod "5b76277d-d38b-4878-8c7a-585979e4c3a7" (UID: "5b76277d-d38b-4878-8c7a-585979e4c3a7"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.077572 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-config" (OuterVolumeSpecName: "config") pod "5bc41811-a217-4e94-a706-49079f735ba5" (UID: "5bc41811-a217-4e94-a706-49079f735ba5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.080148 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b76277d-d38b-4878-8c7a-585979e4c3a7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5b76277d-d38b-4878-8c7a-585979e4c3a7" (UID: "5b76277d-d38b-4878-8c7a-585979e4c3a7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.080156 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bc41811-a217-4e94-a706-49079f735ba5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5bc41811-a217-4e94-a706-49079f735ba5" (UID: "5bc41811-a217-4e94-a706-49079f735ba5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.080199 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b76277d-d38b-4878-8c7a-585979e4c3a7-kube-api-access-5mrnk" (OuterVolumeSpecName: "kube-api-access-5mrnk") pod "5b76277d-d38b-4878-8c7a-585979e4c3a7" (UID: "5b76277d-d38b-4878-8c7a-585979e4c3a7"). InnerVolumeSpecName "kube-api-access-5mrnk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.080317 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bc41811-a217-4e94-a706-49079f735ba5-kube-api-access-hsjs8" (OuterVolumeSpecName: "kube-api-access-hsjs8") pod "5bc41811-a217-4e94-a706-49079f735ba5" (UID: "5bc41811-a217-4e94-a706-49079f735ba5"). InnerVolumeSpecName "kube-api-access-hsjs8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.178323 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b76277d-d38b-4878-8c7a-585979e4c3a7-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.178589 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bc41811-a217-4e94-a706-49079f735ba5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.178679 4723 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.178752 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mrnk\" (UniqueName: \"kubernetes.io/projected/5b76277d-d38b-4878-8c7a-585979e4c3a7-kube-api-access-5mrnk\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.178810 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.178875 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b76277d-d38b-4878-8c7a-585979e4c3a7-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.178930 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.179002 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsjs8\" (UniqueName: \"kubernetes.io/projected/5bc41811-a217-4e94-a706-49079f735ba5-kube-api-access-hsjs8\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.179070 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bc41811-a217-4e94-a706-49079f735ba5-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.554215 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6baefd0-cb1d-4cb9-b856-c5492fb9adcf" path="/var/lib/kubelet/pods/a6baefd0-cb1d-4cb9-b856-c5492fb9adcf/volumes" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.554986 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e17446de-b1eb-4b48-8b90-e5446a61c197" path="/var/lib/kubelet/pods/e17446de-b1eb-4b48-8b90-e5446a61c197/volumes" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.938790 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66698fc56-hf5m6" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.938813 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.972002 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-7nxp6"] Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.973201 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.977145 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-hf5m6"] Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.977753 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.978054 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.978689 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.979326 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.979516 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.979690 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.984667 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.990109 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-hf5m6"] Dec 11 15:27:31 crc kubenswrapper[4723]: I1211 15:27:31.997949 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-7nxp6"] Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.004320 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk"] Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.009170 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-cpggk"] Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.090657 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvwdz\" (UniqueName: \"kubernetes.io/projected/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-kube-api-access-wvwdz\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.090715 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-config\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.090743 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-serving-cert\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.090767 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-client-ca\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.090789 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-proxy-ca-bundles\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.191396 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-proxy-ca-bundles\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.191480 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvwdz\" (UniqueName: \"kubernetes.io/projected/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-kube-api-access-wvwdz\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.191511 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-config\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.191527 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-serving-cert\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.191547 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-client-ca\") 
pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.192310 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-client-ca\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.193815 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-proxy-ca-bundles\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.194369 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-config\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.197237 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-serving-cert\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.207793 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvwdz\" (UniqueName: \"kubernetes.io/projected/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-kube-api-access-wvwdz\") pod \"controller-manager-f7fc999fd-7nxp6\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.299516 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.592909 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-7nxp6"] Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.945708 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" event={"ID":"ed8b275b-cb9e-4b77-b9ed-7fee909b318d","Type":"ContainerStarted","Data":"7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14"} Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.945762 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" event={"ID":"ed8b275b-cb9e-4b77-b9ed-7fee909b318d","Type":"ContainerStarted","Data":"1df8af8ed1d3805f0b0583f2c5f60b026fef18587e270c709bc921b9be3544f6"} Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.946064 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.964314 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:32 crc kubenswrapper[4723]: I1211 15:27:32.964305 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" podStartSLOduration=2.964281159 podStartE2EDuration="2.964281159s" podCreationTimestamp="2025-12-11 15:27:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:32.963228728 +0000 UTC m=+263.737462163" watchObservedRunningTime="2025-12-11 15:27:32.964281159 +0000 UTC m=+263.738514604" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.017575 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh"] Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.018200 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.024291 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.024634 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.025347 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.025388 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.025538 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.027004 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.037002 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh"] Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.103249 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff4c42-9c5f-49a1-a576-1578226d135c-config\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.103329 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87ff4c42-9c5f-49a1-a576-1578226d135c-client-ca\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.103377 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87ff4c42-9c5f-49a1-a576-1578226d135c-serving-cert\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.103417 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp56j\" (UniqueName: \"kubernetes.io/projected/87ff4c42-9c5f-49a1-a576-1578226d135c-kube-api-access-sp56j\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.205162 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff4c42-9c5f-49a1-a576-1578226d135c-config\") pod 
\"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.205253 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87ff4c42-9c5f-49a1-a576-1578226d135c-client-ca\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.205292 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87ff4c42-9c5f-49a1-a576-1578226d135c-serving-cert\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.205334 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp56j\" (UniqueName: \"kubernetes.io/projected/87ff4c42-9c5f-49a1-a576-1578226d135c-kube-api-access-sp56j\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.206778 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/87ff4c42-9c5f-49a1-a576-1578226d135c-client-ca\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.206989 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff4c42-9c5f-49a1-a576-1578226d135c-config\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.212852 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87ff4c42-9c5f-49a1-a576-1578226d135c-serving-cert\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.227153 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp56j\" (UniqueName: \"kubernetes.io/projected/87ff4c42-9c5f-49a1-a576-1578226d135c-kube-api-access-sp56j\") pod \"route-controller-manager-5f9c74ccbd-5sxkh\" (UID: \"87ff4c42-9c5f-49a1-a576-1578226d135c\") " pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.339394 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.560435 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b76277d-d38b-4878-8c7a-585979e4c3a7" path="/var/lib/kubelet/pods/5b76277d-d38b-4878-8c7a-585979e4c3a7/volumes" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.561188 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bc41811-a217-4e94-a706-49079f735ba5" path="/var/lib/kubelet/pods/5bc41811-a217-4e94-a706-49079f735ba5/volumes" Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.745488 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh"] Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.952205 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" event={"ID":"87ff4c42-9c5f-49a1-a576-1578226d135c","Type":"ContainerStarted","Data":"f14218a811dc5f66e8ec94f67b9e6fea36e9e387f32109998ef8826d19db950b"} Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.952249 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" event={"ID":"87ff4c42-9c5f-49a1-a576-1578226d135c","Type":"ContainerStarted","Data":"b1d2865ed67d6caa6a56c2185fa6c89a66fd76b590351dd76c18fcb02f6c2dee"} Dec 11 15:27:33 crc kubenswrapper[4723]: I1211 15:27:33.975062 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" podStartSLOduration=2.975040984 podStartE2EDuration="2.975040984s" podCreationTimestamp="2025-12-11 15:27:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:33.9697469 +0000 UTC m=+264.743980345" watchObservedRunningTime="2025-12-11 15:27:33.975040984 +0000 UTC m=+264.749274419" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.134046 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dh49d"] Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.135138 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.136639 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.142960 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dh49d"] Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.219125 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e763b8-dc9d-4e68-85d5-de6e980b7345-utilities\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.219239 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e763b8-dc9d-4e68-85d5-de6e980b7345-catalog-content\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.219282 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rftsq\" (UniqueName: \"kubernetes.io/projected/60e763b8-dc9d-4e68-85d5-de6e980b7345-kube-api-access-rftsq\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.320793 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e763b8-dc9d-4e68-85d5-de6e980b7345-utilities\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.321271 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e763b8-dc9d-4e68-85d5-de6e980b7345-catalog-content\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.321380 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rftsq\" (UniqueName: \"kubernetes.io/projected/60e763b8-dc9d-4e68-85d5-de6e980b7345-kube-api-access-rftsq\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.321381 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60e763b8-dc9d-4e68-85d5-de6e980b7345-utilities\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.321928 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60e763b8-dc9d-4e68-85d5-de6e980b7345-catalog-content\") pod \"community-operators-dh49d\" (UID: 
\"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.343503 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rftsq\" (UniqueName: \"kubernetes.io/projected/60e763b8-dc9d-4e68-85d5-de6e980b7345-kube-api-access-rftsq\") pod \"community-operators-dh49d\" (UID: \"60e763b8-dc9d-4e68-85d5-de6e980b7345\") " pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.453779 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.957020 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:34 crc kubenswrapper[4723]: I1211 15:27:34.961225 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f9c74ccbd-5sxkh" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.007850 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dh49d"] Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.132804 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-42q2h"] Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.134180 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.139765 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.150443 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-42q2h"] Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.233175 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-catalog-content\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.233240 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvwcx\" (UniqueName: \"kubernetes.io/projected/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-kube-api-access-tvwcx\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.233268 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-utilities\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.335983 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-catalog-content\") pod 
\"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.336029 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvwcx\" (UniqueName: \"kubernetes.io/projected/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-kube-api-access-tvwcx\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.336053 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-utilities\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.336502 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-utilities\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.339465 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-catalog-content\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.359449 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvwcx\" (UniqueName: \"kubernetes.io/projected/f0c65aaf-bb95-4a84-8b09-ec8c41da00f5-kube-api-access-tvwcx\") pod \"certified-operators-42q2h\" (UID: \"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5\") " pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.449554 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.496042 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-7nxp6"] Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.720102 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j2g8h"] Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.720825 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.751764 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j2g8h"] Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842326 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-registry-tls\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842391 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842444 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-registry-certificates\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842469 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842551 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-bound-sa-token\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842592 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-trusted-ca\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842625 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.842652 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcd5g\" (UniqueName: 
\"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-kube-api-access-fcd5g\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.886821 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.932230 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-42q2h"] Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.943964 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-trusted-ca\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.944037 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.944058 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcd5g\" (UniqueName: \"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-kube-api-access-fcd5g\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.944080 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-registry-tls\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.944115 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-registry-certificates\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.944135 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.944167 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-bound-sa-token\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.946219 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.947202 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-trusted-ca\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.947304 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-registry-certificates\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: W1211 15:27:35.947395 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0c65aaf_bb95_4a84_8b09_ec8c41da00f5.slice/crio-aa63a15dd43d4876996bbe1d8980ff76b17ce397f85ea33a58f5a307a92a37d8 WatchSource:0}: Error finding container aa63a15dd43d4876996bbe1d8980ff76b17ce397f85ea33a58f5a307a92a37d8: Status 404 returned error can't find the container with id aa63a15dd43d4876996bbe1d8980ff76b17ce397f85ea33a58f5a307a92a37d8 Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.952659 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.952679 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-registry-tls\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.969906 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-bound-sa-token\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.971144 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42q2h" event={"ID":"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5","Type":"ContainerStarted","Data":"aa63a15dd43d4876996bbe1d8980ff76b17ce397f85ea33a58f5a307a92a37d8"} Dec 11 15:27:35 
crc kubenswrapper[4723]: I1211 15:27:35.972391 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcd5g\" (UniqueName: \"kubernetes.io/projected/4c6697ab-97f7-41c8-ba42-681aa0ba45bf-kube-api-access-fcd5g\") pod \"image-registry-66df7c8f76-j2g8h\" (UID: \"4c6697ab-97f7-41c8-ba42-681aa0ba45bf\") " pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.973510 4723 generic.go:334] "Generic (PLEG): container finished" podID="60e763b8-dc9d-4e68-85d5-de6e980b7345" containerID="613be6a352a47bce54bb56ef9331ca51ca43e63b166ee8eb1cf363f29b7caabe" exitCode=0 Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.974762 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dh49d" event={"ID":"60e763b8-dc9d-4e68-85d5-de6e980b7345","Type":"ContainerDied","Data":"613be6a352a47bce54bb56ef9331ca51ca43e63b166ee8eb1cf363f29b7caabe"} Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.974791 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dh49d" event={"ID":"60e763b8-dc9d-4e68-85d5-de6e980b7345","Type":"ContainerStarted","Data":"09851467db158b5d30460b0349a06b196d46481988cd4b1a6f391701667a73b8"} Dec 11 15:27:35 crc kubenswrapper[4723]: I1211 15:27:35.975135 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" podUID="ed8b275b-cb9e-4b77-b9ed-7fee909b318d" containerName="controller-manager" containerID="cri-o://7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14" gracePeriod=30 Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.044340 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.341934 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b9d67"] Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.343391 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.347218 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.354366 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9d67"] Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.451909 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-utilities\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.451991 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-catalog-content\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.452033 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8zwj\" (UniqueName: \"kubernetes.io/projected/0eb578a6-70ec-41a6-8823-da3f87d6f591-kube-api-access-x8zwj\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.469408 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j2g8h"] Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.553130 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8zwj\" (UniqueName: \"kubernetes.io/projected/0eb578a6-70ec-41a6-8823-da3f87d6f591-kube-api-access-x8zwj\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.553567 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-utilities\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.553628 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-catalog-content\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.554080 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-utilities\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.554111 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-catalog-content\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.573915 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8zwj\" (UniqueName: \"kubernetes.io/projected/0eb578a6-70ec-41a6-8823-da3f87d6f591-kube-api-access-x8zwj\") pod \"redhat-operators-b9d67\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.631083 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.668402 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.672211 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-7wp2m"] Dec 11 15:27:36 crc kubenswrapper[4723]: E1211 15:27:36.672474 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8b275b-cb9e-4b77-b9ed-7fee909b318d" containerName="controller-manager" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.672495 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8b275b-cb9e-4b77-b9ed-7fee909b318d" containerName="controller-manager" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.673731 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed8b275b-cb9e-4b77-b9ed-7fee909b318d" containerName="controller-manager" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.675293 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.689002 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-7wp2m"] Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.755496 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-client-ca\") pod \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.756982 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-client-ca" (OuterVolumeSpecName: "client-ca") pod "ed8b275b-cb9e-4b77-b9ed-7fee909b318d" (UID: "ed8b275b-cb9e-4b77-b9ed-7fee909b318d"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.757443 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-serving-cert\") pod \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758208 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-config\") pod \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758281 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-proxy-ca-bundles\") pod \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758319 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvwdz\" (UniqueName: \"kubernetes.io/projected/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-kube-api-access-wvwdz\") pod \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\" (UID: \"ed8b275b-cb9e-4b77-b9ed-7fee909b318d\") " Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758502 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-client-ca\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758550 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-config\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758617 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e485b1-ba6a-4992-8fb9-f52af00a9991-serving-cert\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758676 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-proxy-ca-bundles\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758818 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x8zv\" (UniqueName: \"kubernetes.io/projected/b3e485b1-ba6a-4992-8fb9-f52af00a9991-kube-api-access-2x8zv\") pod \"controller-manager-66698fc56-7wp2m\" (UID: 
\"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758829 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "ed8b275b-cb9e-4b77-b9ed-7fee909b318d" (UID: "ed8b275b-cb9e-4b77-b9ed-7fee909b318d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.758875 4723 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.759340 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-config" (OuterVolumeSpecName: "config") pod "ed8b275b-cb9e-4b77-b9ed-7fee909b318d" (UID: "ed8b275b-cb9e-4b77-b9ed-7fee909b318d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.763286 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ed8b275b-cb9e-4b77-b9ed-7fee909b318d" (UID: "ed8b275b-cb9e-4b77-b9ed-7fee909b318d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.763347 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-kube-api-access-wvwdz" (OuterVolumeSpecName: "kube-api-access-wvwdz") pod "ed8b275b-cb9e-4b77-b9ed-7fee909b318d" (UID: "ed8b275b-cb9e-4b77-b9ed-7fee909b318d"). InnerVolumeSpecName "kube-api-access-wvwdz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.860106 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-client-ca\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.861515 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-config\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.861367 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-client-ca\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864280 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-config\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864356 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e485b1-ba6a-4992-8fb9-f52af00a9991-serving-cert\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864392 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-proxy-ca-bundles\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864564 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x8zv\" (UniqueName: \"kubernetes.io/projected/b3e485b1-ba6a-4992-8fb9-f52af00a9991-kube-api-access-2x8zv\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864616 4723 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864631 4723 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864642 4723 reconciler_common.go:293] "Volume 
detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.864886 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvwdz\" (UniqueName: \"kubernetes.io/projected/ed8b275b-cb9e-4b77-b9ed-7fee909b318d-kube-api-access-wvwdz\") on node \"crc\" DevicePath \"\"" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.866472 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b3e485b1-ba6a-4992-8fb9-f52af00a9991-proxy-ca-bundles\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.867801 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b3e485b1-ba6a-4992-8fb9-f52af00a9991-serving-cert\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.885076 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x8zv\" (UniqueName: \"kubernetes.io/projected/b3e485b1-ba6a-4992-8fb9-f52af00a9991-kube-api-access-2x8zv\") pod \"controller-manager-66698fc56-7wp2m\" (UID: \"b3e485b1-ba6a-4992-8fb9-f52af00a9991\") " pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.982097 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" event={"ID":"4c6697ab-97f7-41c8-ba42-681aa0ba45bf","Type":"ContainerStarted","Data":"89161dd5cd63e186b0bd80cfdabf12331793b884301018c9bb00ac6ec0a6e885"} Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.982157 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" event={"ID":"4c6697ab-97f7-41c8-ba42-681aa0ba45bf","Type":"ContainerStarted","Data":"3c32a7a42a6f557f919247bb583002469f4d33d6e7bfd49525bea79f3f9ad8ec"} Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.984645 4723 generic.go:334] "Generic (PLEG): container finished" podID="f0c65aaf-bb95-4a84-8b09-ec8c41da00f5" containerID="45c0f04ea32468745787e7885644c206fd42eb2d933057091ad5e480159c943d" exitCode=0 Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.984699 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42q2h" event={"ID":"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5","Type":"ContainerDied","Data":"45c0f04ea32468745787e7885644c206fd42eb2d933057091ad5e480159c943d"} Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.986060 4723 generic.go:334] "Generic (PLEG): container finished" podID="ed8b275b-cb9e-4b77-b9ed-7fee909b318d" containerID="7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14" exitCode=0 Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.986116 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.986152 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" event={"ID":"ed8b275b-cb9e-4b77-b9ed-7fee909b318d","Type":"ContainerDied","Data":"7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14"} Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.986222 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-7nxp6" event={"ID":"ed8b275b-cb9e-4b77-b9ed-7fee909b318d","Type":"ContainerDied","Data":"1df8af8ed1d3805f0b0583f2c5f60b026fef18587e270c709bc921b9be3544f6"} Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.986243 4723 scope.go:117] "RemoveContainer" containerID="7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14" Dec 11 15:27:36 crc kubenswrapper[4723]: I1211 15:27:36.994886 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.013102 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" podStartSLOduration=2.013082288 podStartE2EDuration="2.013082288s" podCreationTimestamp="2025-12-11 15:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:37.008904156 +0000 UTC m=+267.783137601" watchObservedRunningTime="2025-12-11 15:27:37.013082288 +0000 UTC m=+267.787315713" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.017360 4723 scope.go:117] "RemoveContainer" containerID="7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14" Dec 11 15:27:37 crc kubenswrapper[4723]: E1211 15:27:37.017742 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14\": container with ID starting with 7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14 not found: ID does not exist" containerID="7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.017771 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14"} err="failed to get container status \"7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14\": rpc error: code = NotFound desc = could not find container \"7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14\": container with ID starting with 7f2583323bbaa3bf947592bb94571b192e149d588d6b097eccc6e914386d4e14 not found: ID does not exist" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.048599 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-7nxp6"] Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.053504 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-7nxp6"] Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.148079 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9d67"] Dec 11 15:27:37 crc 
kubenswrapper[4723]: I1211 15:27:37.463234 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66698fc56-7wp2m"] Dec 11 15:27:37 crc kubenswrapper[4723]: W1211 15:27:37.496992 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3e485b1_ba6a_4992_8fb9_f52af00a9991.slice/crio-ba80b001a1ba73850ce90ae696b3ce3135232081eed43b4a89f60d68e2884d2d WatchSource:0}: Error finding container ba80b001a1ba73850ce90ae696b3ce3135232081eed43b4a89f60d68e2884d2d: Status 404 returned error can't find the container with id ba80b001a1ba73850ce90ae696b3ce3135232081eed43b4a89f60d68e2884d2d Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.559110 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed8b275b-cb9e-4b77-b9ed-7fee909b318d" path="/var/lib/kubelet/pods/ed8b275b-cb9e-4b77-b9ed-7fee909b318d/volumes" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.741598 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lx8rd"] Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.743180 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.745217 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.762996 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lx8rd"] Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.886508 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfrt7\" (UniqueName: \"kubernetes.io/projected/e59bd82f-379d-4813-82be-6fb411f1ebec-kube-api-access-kfrt7\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.886583 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-utilities\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.886609 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-catalog-content\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.987515 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfrt7\" (UniqueName: \"kubernetes.io/projected/e59bd82f-379d-4813-82be-6fb411f1ebec-kube-api-access-kfrt7\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.987583 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-utilities\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.987599 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-catalog-content\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.988031 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-catalog-content\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.988388 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-utilities\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.994570 4723 generic.go:334] "Generic (PLEG): container finished" podID="60e763b8-dc9d-4e68-85d5-de6e980b7345" containerID="0f8b646b360a295f2774848925aaaa71fc8a660a05fc0c8cb96bb29e04d23b65" exitCode=0 Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.994638 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dh49d" event={"ID":"60e763b8-dc9d-4e68-85d5-de6e980b7345","Type":"ContainerDied","Data":"0f8b646b360a295f2774848925aaaa71fc8a660a05fc0c8cb96bb29e04d23b65"} Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.998633 4723 generic.go:334] "Generic (PLEG): container finished" podID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerID="933dd92ac674cff87f146b1fb048912aab672d4ff65cbc17208ebb6da5b0dfbd" exitCode=0 Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.998712 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9d67" event={"ID":"0eb578a6-70ec-41a6-8823-da3f87d6f591","Type":"ContainerDied","Data":"933dd92ac674cff87f146b1fb048912aab672d4ff65cbc17208ebb6da5b0dfbd"} Dec 11 15:27:37 crc kubenswrapper[4723]: I1211 15:27:37.998738 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9d67" event={"ID":"0eb578a6-70ec-41a6-8823-da3f87d6f591","Type":"ContainerStarted","Data":"f2b93c747adff0341c3cbc8fc9ef29b4eae8df85cb9e4788334b2f4189717f86"} Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.003643 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" event={"ID":"b3e485b1-ba6a-4992-8fb9-f52af00a9991","Type":"ContainerStarted","Data":"3da41acf159efdb86477ae156e0c8e93dd37c2614a667dc0f3b80fca9f60b0b6"} Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.003711 4723 patch_prober.go:28] interesting pod/controller-manager-66698fc56-7wp2m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.69:8443/healthz\": dial tcp 10.217.0.69:8443: connect: connection refused" start-of-body= Dec 11 
15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.003747 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" podUID="b3e485b1-ba6a-4992-8fb9-f52af00a9991" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.69:8443/healthz\": dial tcp 10.217.0.69:8443: connect: connection refused" Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.003685 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.003840 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.003853 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" event={"ID":"b3e485b1-ba6a-4992-8fb9-f52af00a9991","Type":"ContainerStarted","Data":"ba80b001a1ba73850ce90ae696b3ce3135232081eed43b4a89f60d68e2884d2d"} Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.020737 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfrt7\" (UniqueName: \"kubernetes.io/projected/e59bd82f-379d-4813-82be-6fb411f1ebec-kube-api-access-kfrt7\") pod \"redhat-marketplace-lx8rd\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.042876 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" podStartSLOduration=3.042855553 podStartE2EDuration="3.042855553s" podCreationTimestamp="2025-12-11 15:27:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:27:38.035356356 +0000 UTC m=+268.809589801" watchObservedRunningTime="2025-12-11 15:27:38.042855553 +0000 UTC m=+268.817088978" Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.058912 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:27:38 crc kubenswrapper[4723]: I1211 15:27:38.611110 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lx8rd"] Dec 11 15:27:38 crc kubenswrapper[4723]: W1211 15:27:38.627399 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode59bd82f_379d_4813_82be_6fb411f1ebec.slice/crio-20c33e89c0c7a1803937a74aba0e597d02c584789bb6bffa4c9854e67520d84e WatchSource:0}: Error finding container 20c33e89c0c7a1803937a74aba0e597d02c584789bb6bffa4c9854e67520d84e: Status 404 returned error can't find the container with id 20c33e89c0c7a1803937a74aba0e597d02c584789bb6bffa4c9854e67520d84e Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.011663 4723 generic.go:334] "Generic (PLEG): container finished" podID="f0c65aaf-bb95-4a84-8b09-ec8c41da00f5" containerID="9616c419276ac07e206e99593f4c7b6fc277c7e14937b903157955042fd2ec00" exitCode=0 Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.012033 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42q2h" event={"ID":"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5","Type":"ContainerDied","Data":"9616c419276ac07e206e99593f4c7b6fc277c7e14937b903157955042fd2ec00"} Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.015205 4723 generic.go:334] "Generic (PLEG): container finished" podID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerID="996b3b208295912ce21aef3f091f9a2cbebb041ced90740e9f38299b294dc0c5" exitCode=0 Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.015281 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lx8rd" event={"ID":"e59bd82f-379d-4813-82be-6fb411f1ebec","Type":"ContainerDied","Data":"996b3b208295912ce21aef3f091f9a2cbebb041ced90740e9f38299b294dc0c5"} Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.015329 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lx8rd" event={"ID":"e59bd82f-379d-4813-82be-6fb411f1ebec","Type":"ContainerStarted","Data":"20c33e89c0c7a1803937a74aba0e597d02c584789bb6bffa4c9854e67520d84e"} Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.019119 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dh49d" event={"ID":"60e763b8-dc9d-4e68-85d5-de6e980b7345","Type":"ContainerStarted","Data":"91c622dc6ff2f3d892a23cafe517ced2ca793d30c26e3234aeacf6dd8a28fc97"} Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.024892 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66698fc56-7wp2m" Dec 11 15:27:39 crc kubenswrapper[4723]: I1211 15:27:39.065443 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dh49d" podStartSLOduration=2.46971443 podStartE2EDuration="5.065405661s" podCreationTimestamp="2025-12-11 15:27:34 +0000 UTC" firstStartedPulling="2025-12-11 15:27:35.97532637 +0000 UTC m=+266.749559805" lastFinishedPulling="2025-12-11 15:27:38.571017601 +0000 UTC m=+269.345251036" observedRunningTime="2025-12-11 15:27:39.063614029 +0000 UTC m=+269.837847464" watchObservedRunningTime="2025-12-11 15:27:39.065405661 +0000 UTC m=+269.839639086" Dec 11 15:27:41 crc kubenswrapper[4723]: I1211 15:27:41.032284 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-b9d67" event={"ID":"0eb578a6-70ec-41a6-8823-da3f87d6f591","Type":"ContainerStarted","Data":"374a6acd089f87060fc7dc23f4e07257d4e56c8c6d0dc818bf14fd6265b7bdf9"} Dec 11 15:27:42 crc kubenswrapper[4723]: I1211 15:27:42.038852 4723 generic.go:334] "Generic (PLEG): container finished" podID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerID="374a6acd089f87060fc7dc23f4e07257d4e56c8c6d0dc818bf14fd6265b7bdf9" exitCode=0 Dec 11 15:27:42 crc kubenswrapper[4723]: I1211 15:27:42.038992 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9d67" event={"ID":"0eb578a6-70ec-41a6-8823-da3f87d6f591","Type":"ContainerDied","Data":"374a6acd089f87060fc7dc23f4e07257d4e56c8c6d0dc818bf14fd6265b7bdf9"} Dec 11 15:27:42 crc kubenswrapper[4723]: I1211 15:27:42.041218 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42q2h" event={"ID":"f0c65aaf-bb95-4a84-8b09-ec8c41da00f5","Type":"ContainerStarted","Data":"5c808c9e07af979e9b7f9e86b30dd320bb331079228ab0276f6d2ee69fbfff89"} Dec 11 15:27:42 crc kubenswrapper[4723]: I1211 15:27:42.048256 4723 generic.go:334] "Generic (PLEG): container finished" podID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerID="2f12350e213bec9f78c23b34d7cd13813fd3f459e92e087dbf49eda3cf27d73d" exitCode=0 Dec 11 15:27:42 crc kubenswrapper[4723]: I1211 15:27:42.048304 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lx8rd" event={"ID":"e59bd82f-379d-4813-82be-6fb411f1ebec","Type":"ContainerDied","Data":"2f12350e213bec9f78c23b34d7cd13813fd3f459e92e087dbf49eda3cf27d73d"} Dec 11 15:27:42 crc kubenswrapper[4723]: I1211 15:27:42.097175 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-42q2h" podStartSLOduration=3.033979916 podStartE2EDuration="7.097157811s" podCreationTimestamp="2025-12-11 15:27:35 +0000 UTC" firstStartedPulling="2025-12-11 15:27:36.986765885 +0000 UTC m=+267.760999320" lastFinishedPulling="2025-12-11 15:27:41.04994378 +0000 UTC m=+271.824177215" observedRunningTime="2025-12-11 15:27:42.096296426 +0000 UTC m=+272.870529861" watchObservedRunningTime="2025-12-11 15:27:42.097157811 +0000 UTC m=+272.871391246" Dec 11 15:27:44 crc kubenswrapper[4723]: I1211 15:27:44.454771 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:44 crc kubenswrapper[4723]: I1211 15:27:44.454942 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:44 crc kubenswrapper[4723]: I1211 15:27:44.505443 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:45 crc kubenswrapper[4723]: I1211 15:27:45.112435 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dh49d" Dec 11 15:27:45 crc kubenswrapper[4723]: I1211 15:27:45.449908 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:45 crc kubenswrapper[4723]: I1211 15:27:45.449963 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:45 crc kubenswrapper[4723]: I1211 15:27:45.483069 4723 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:46 crc kubenswrapper[4723]: I1211 15:27:46.103838 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-42q2h" Dec 11 15:27:56 crc kubenswrapper[4723]: I1211 15:27:56.050351 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-j2g8h" Dec 11 15:27:56 crc kubenswrapper[4723]: I1211 15:27:56.114547 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w957v"] Dec 11 15:27:58 crc kubenswrapper[4723]: I1211 15:27:58.133483 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9d67" event={"ID":"0eb578a6-70ec-41a6-8823-da3f87d6f591","Type":"ContainerStarted","Data":"2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322"} Dec 11 15:27:58 crc kubenswrapper[4723]: I1211 15:27:58.136707 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lx8rd" event={"ID":"e59bd82f-379d-4813-82be-6fb411f1ebec","Type":"ContainerStarted","Data":"05a0ddf6e0bfbd8eb651d560f47c7da3f69aec4d386b94998034724ffd35769c"} Dec 11 15:27:58 crc kubenswrapper[4723]: I1211 15:27:58.148384 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b9d67" podStartSLOduration=3.193048591 podStartE2EDuration="22.148366336s" podCreationTimestamp="2025-12-11 15:27:36 +0000 UTC" firstStartedPulling="2025-12-11 15:27:37.999780805 +0000 UTC m=+268.774014240" lastFinishedPulling="2025-12-11 15:27:56.95509855 +0000 UTC m=+287.729331985" observedRunningTime="2025-12-11 15:27:58.145733269 +0000 UTC m=+288.919966704" watchObservedRunningTime="2025-12-11 15:27:58.148366336 +0000 UTC m=+288.922599771" Dec 11 15:27:58 crc kubenswrapper[4723]: I1211 15:27:58.161380 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lx8rd" podStartSLOduration=3.224042922 podStartE2EDuration="21.161360592s" podCreationTimestamp="2025-12-11 15:27:37 +0000 UTC" firstStartedPulling="2025-12-11 15:27:39.016813302 +0000 UTC m=+269.791046737" lastFinishedPulling="2025-12-11 15:27:56.954130972 +0000 UTC m=+287.728364407" observedRunningTime="2025-12-11 15:27:58.161217878 +0000 UTC m=+288.935451323" watchObservedRunningTime="2025-12-11 15:27:58.161360592 +0000 UTC m=+288.935594027" Dec 11 15:28:06 crc kubenswrapper[4723]: I1211 15:28:06.669076 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:28:06 crc kubenswrapper[4723]: I1211 15:28:06.669783 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:28:06 crc kubenswrapper[4723]: I1211 15:28:06.705445 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:28:07 crc kubenswrapper[4723]: I1211 15:28:07.224712 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:28:08 crc kubenswrapper[4723]: I1211 15:28:08.059577 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:28:08 crc kubenswrapper[4723]: I1211 
15:28:08.060491 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:28:08 crc kubenswrapper[4723]: I1211 15:28:08.124326 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:28:08 crc kubenswrapper[4723]: I1211 15:28:08.277121 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:28:09 crc kubenswrapper[4723]: I1211 15:28:09.410821 4723 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.151587 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" podUID="8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" containerName="registry" containerID="cri-o://a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b" gracePeriod=30 Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.505571 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.663405 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-trusted-ca\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.663475 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-tls\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.663536 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smwcw\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-kube-api-access-smwcw\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.663580 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-installation-pull-secrets\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.663602 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-bound-sa-token\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.663646 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-certificates\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.665016 4723 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.665006 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.665093 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-ca-trust-extracted\") pod \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\" (UID: \"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f\") " Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.665193 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.666059 4723 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.666089 4723 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.671436 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.671757 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.674153 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-kube-api-access-smwcw" (OuterVolumeSpecName: "kube-api-access-smwcw") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "kube-api-access-smwcw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.674794 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.675337 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.682519 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" (UID: "8c1c36ea-a0e0-4973-a356-e069f2eb2d4f"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.767231 4723 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.767318 4723 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.767331 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smwcw\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-kube-api-access-smwcw\") on node \"crc\" DevicePath \"\"" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.767345 4723 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 15:28:21 crc kubenswrapper[4723]: I1211 15:28:21.767357 4723 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.269591 4723 generic.go:334] "Generic (PLEG): container finished" podID="8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" containerID="a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b" exitCode=0 Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.269934 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" event={"ID":"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f","Type":"ContainerDied","Data":"a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b"} Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.269994 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-w957v" event={"ID":"8c1c36ea-a0e0-4973-a356-e069f2eb2d4f","Type":"ContainerDied","Data":"a807a4148057781202971bf3fcbdfccd310d3709a91870851ac3152dd9e73b54"} Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.270022 4723 scope.go:117] "RemoveContainer" containerID="a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b" Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.270175 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w957v" Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.289234 4723 scope.go:117] "RemoveContainer" containerID="a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b" Dec 11 15:28:22 crc kubenswrapper[4723]: E1211 15:28:22.289582 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b\": container with ID starting with a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b not found: ID does not exist" containerID="a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b" Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.289616 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b"} err="failed to get container status \"a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b\": rpc error: code = NotFound desc = could not find container \"a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b\": container with ID starting with a2ed07ea76d5ca95458b5a19e1ef018237ada2dccd827d9ad1ad887fb9e5054b not found: ID does not exist" Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.306959 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w957v"] Dec 11 15:28:22 crc kubenswrapper[4723]: I1211 15:28:22.311731 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w957v"] Dec 11 15:28:23 crc kubenswrapper[4723]: I1211 15:28:23.554064 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" path="/var/lib/kubelet/pods/8c1c36ea-a0e0-4973-a356-e069f2eb2d4f/volumes" Dec 11 15:29:13 crc kubenswrapper[4723]: I1211 15:29:13.744873 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:29:13 crc kubenswrapper[4723]: I1211 15:29:13.745473 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:29:43 crc kubenswrapper[4723]: I1211 15:29:43.744830 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 
11 15:29:43 crc kubenswrapper[4723]: I1211 15:29:43.745430 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.197156 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz"] Dec 11 15:30:00 crc kubenswrapper[4723]: E1211 15:30:00.197816 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" containerName="registry" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.197847 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" containerName="registry" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.197951 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c1c36ea-a0e0-4973-a356-e069f2eb2d4f" containerName="registry" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.198402 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.201587 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.201870 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.212709 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz"] Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.350603 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b975c363-e7a4-4d98-ad62-3dae613b3b3a-config-volume\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.351467 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvs8c\" (UniqueName: \"kubernetes.io/projected/b975c363-e7a4-4d98-ad62-3dae613b3b3a-kube-api-access-cvs8c\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.351528 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b975c363-e7a4-4d98-ad62-3dae613b3b3a-secret-volume\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.452921 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b975c363-e7a4-4d98-ad62-3dae613b3b3a-config-volume\") 
pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.453016 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvs8c\" (UniqueName: \"kubernetes.io/projected/b975c363-e7a4-4d98-ad62-3dae613b3b3a-kube-api-access-cvs8c\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.453067 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b975c363-e7a4-4d98-ad62-3dae613b3b3a-secret-volume\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.453930 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b975c363-e7a4-4d98-ad62-3dae613b3b3a-config-volume\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.459248 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b975c363-e7a4-4d98-ad62-3dae613b3b3a-secret-volume\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.469434 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvs8c\" (UniqueName: \"kubernetes.io/projected/b975c363-e7a4-4d98-ad62-3dae613b3b3a-kube-api-access-cvs8c\") pod \"collect-profiles-29424450-dtvzz\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.554307 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.720097 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz"] Dec 11 15:30:00 crc kubenswrapper[4723]: I1211 15:30:00.789856 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" event={"ID":"b975c363-e7a4-4d98-ad62-3dae613b3b3a","Type":"ContainerStarted","Data":"f753ef15152b88d4651d428f2774adcdcef58f9cef6ff310be02cc81294ad9ae"} Dec 11 15:30:01 crc kubenswrapper[4723]: I1211 15:30:01.800808 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" event={"ID":"b975c363-e7a4-4d98-ad62-3dae613b3b3a","Type":"ContainerStarted","Data":"1824b372e3b55e560fce2dc2c6dcff112665e90dfcd9a20cdd1e758d4b262353"} Dec 11 15:30:01 crc kubenswrapper[4723]: I1211 15:30:01.818458 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" podStartSLOduration=1.818437514 podStartE2EDuration="1.818437514s" podCreationTimestamp="2025-12-11 15:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:30:01.8148103 +0000 UTC m=+412.589043735" watchObservedRunningTime="2025-12-11 15:30:01.818437514 +0000 UTC m=+412.592670949" Dec 11 15:30:02 crc kubenswrapper[4723]: I1211 15:30:02.807065 4723 generic.go:334] "Generic (PLEG): container finished" podID="b975c363-e7a4-4d98-ad62-3dae613b3b3a" containerID="1824b372e3b55e560fce2dc2c6dcff112665e90dfcd9a20cdd1e758d4b262353" exitCode=0 Dec 11 15:30:02 crc kubenswrapper[4723]: I1211 15:30:02.807108 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" event={"ID":"b975c363-e7a4-4d98-ad62-3dae613b3b3a","Type":"ContainerDied","Data":"1824b372e3b55e560fce2dc2c6dcff112665e90dfcd9a20cdd1e758d4b262353"} Dec 11 15:30:03 crc kubenswrapper[4723]: I1211 15:30:03.995107 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.098615 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b975c363-e7a4-4d98-ad62-3dae613b3b3a-secret-volume\") pod \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.098703 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvs8c\" (UniqueName: \"kubernetes.io/projected/b975c363-e7a4-4d98-ad62-3dae613b3b3a-kube-api-access-cvs8c\") pod \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.098769 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b975c363-e7a4-4d98-ad62-3dae613b3b3a-config-volume\") pod \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\" (UID: \"b975c363-e7a4-4d98-ad62-3dae613b3b3a\") " Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.099312 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b975c363-e7a4-4d98-ad62-3dae613b3b3a-config-volume" (OuterVolumeSpecName: "config-volume") pod "b975c363-e7a4-4d98-ad62-3dae613b3b3a" (UID: "b975c363-e7a4-4d98-ad62-3dae613b3b3a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.104173 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b975c363-e7a4-4d98-ad62-3dae613b3b3a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b975c363-e7a4-4d98-ad62-3dae613b3b3a" (UID: "b975c363-e7a4-4d98-ad62-3dae613b3b3a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.107017 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b975c363-e7a4-4d98-ad62-3dae613b3b3a-kube-api-access-cvs8c" (OuterVolumeSpecName: "kube-api-access-cvs8c") pod "b975c363-e7a4-4d98-ad62-3dae613b3b3a" (UID: "b975c363-e7a4-4d98-ad62-3dae613b3b3a"). InnerVolumeSpecName "kube-api-access-cvs8c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.200594 4723 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b975c363-e7a4-4d98-ad62-3dae613b3b3a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.200625 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvs8c\" (UniqueName: \"kubernetes.io/projected/b975c363-e7a4-4d98-ad62-3dae613b3b3a-kube-api-access-cvs8c\") on node \"crc\" DevicePath \"\"" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.200634 4723 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b975c363-e7a4-4d98-ad62-3dae613b3b3a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.818379 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" event={"ID":"b975c363-e7a4-4d98-ad62-3dae613b3b3a","Type":"ContainerDied","Data":"f753ef15152b88d4651d428f2774adcdcef58f9cef6ff310be02cc81294ad9ae"} Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.818685 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f753ef15152b88d4651d428f2774adcdcef58f9cef6ff310be02cc81294ad9ae" Dec 11 15:30:04 crc kubenswrapper[4723]: I1211 15:30:04.818464 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424450-dtvzz" Dec 11 15:30:09 crc kubenswrapper[4723]: I1211 15:30:09.889943 4723 scope.go:117] "RemoveContainer" containerID="17bab4659994331b17dd16be8b8c95c07d38d8749f175c81583989b0fdf4a276" Dec 11 15:30:09 crc kubenswrapper[4723]: I1211 15:30:09.905144 4723 scope.go:117] "RemoveContainer" containerID="861aafac588c23dc387507ad7c1c59801f5a448a525ef77bec0d0e383fbf7fb2" Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.745139 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.745258 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.745326 4723 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.746239 4723 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"13f30ec6b4ba49b633e30eb57b2722a37e416c3766af4898268c21192f72194a"} pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.746331 4723 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" containerID="cri-o://13f30ec6b4ba49b633e30eb57b2722a37e416c3766af4898268c21192f72194a" gracePeriod=600 Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.872347 4723 generic.go:334] "Generic (PLEG): container finished" podID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerID="13f30ec6b4ba49b633e30eb57b2722a37e416c3766af4898268c21192f72194a" exitCode=0 Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.872395 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerDied","Data":"13f30ec6b4ba49b633e30eb57b2722a37e416c3766af4898268c21192f72194a"} Dec 11 15:30:13 crc kubenswrapper[4723]: I1211 15:30:13.872429 4723 scope.go:117] "RemoveContainer" containerID="eea59cca828e649f2ade7213257a4996233bb2461e56f1be372f343c931be07b" Dec 11 15:30:14 crc kubenswrapper[4723]: I1211 15:30:14.878955 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"5fa3fe424c4893f1bc2976d405633dbdfbe341b0a6fe25c362bac639bab753ca"} Dec 11 15:32:09 crc kubenswrapper[4723]: I1211 15:32:09.941542 4723 scope.go:117] "RemoveContainer" containerID="78257f73839bb8b43224a27f4df63830c419c2feeec09b8209962c78b3d850e6" Dec 11 15:32:09 crc kubenswrapper[4723]: I1211 15:32:09.969952 4723 scope.go:117] "RemoveContainer" containerID="921102dd93e77e4004ea599ae48926db8e3e99aded68fcdb83f78fbefbeb3429" Dec 11 15:32:13 crc kubenswrapper[4723]: I1211 15:32:13.744918 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:32:13 crc kubenswrapper[4723]: I1211 15:32:13.745656 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:32:43 crc kubenswrapper[4723]: I1211 15:32:43.745656 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:32:43 crc kubenswrapper[4723]: I1211 15:32:43.746304 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.386805 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j6xw5"] Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.387630 4723 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-controller" containerID="cri-o://baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.387700 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-acl-logging" containerID="cri-o://3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.387711 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.387753 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-node" containerID="cri-o://a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.387671 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="nbdb" containerID="cri-o://8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.387812 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="northd" containerID="cri-o://5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.387848 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="sbdb" containerID="cri-o://eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.411679 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovnkube-controller" containerID="cri-o://45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" gracePeriod=30 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.721536 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j6xw5_485d782b-f4ea-4a0f-8e25-66b50577addf/ovn-acl-logging/0.log" Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.722085 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j6xw5_485d782b-f4ea-4a0f-8e25-66b50577addf/ovn-controller/0.log" Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.722906 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" exitCode=0 Dec 11 15:33:00 crc kubenswrapper[4723]: 
I1211 15:33:00.722930 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" exitCode=0 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.722938 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" exitCode=143 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.722946 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" exitCode=143 Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.722990 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711"} Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.723020 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1"} Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.723032 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5"} Dec 11 15:33:00 crc kubenswrapper[4723]: I1211 15:33:00.723046 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.123549 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j6xw5_485d782b-f4ea-4a0f-8e25-66b50577addf/ovn-acl-logging/0.log" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.124187 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j6xw5_485d782b-f4ea-4a0f-8e25-66b50577addf/ovn-controller/0.log" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.124602 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176250 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gqgq7"] Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176469 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-node" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176486 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-node" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176502 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b975c363-e7a4-4d98-ad62-3dae613b3b3a" containerName="collect-profiles" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176511 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="b975c363-e7a4-4d98-ad62-3dae613b3b3a" containerName="collect-profiles" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176523 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kubecfg-setup" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176532 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kubecfg-setup" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176541 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="northd" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176548 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="northd" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176560 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-acl-logging" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176567 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-acl-logging" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176577 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="sbdb" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176583 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="sbdb" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176592 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-controller" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176599 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-controller" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176609 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176617 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176627 4723 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovnkube-controller" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176635 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovnkube-controller" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.176650 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="nbdb" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176658 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="nbdb" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176768 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-node" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176778 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-acl-logging" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176790 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="kube-rbac-proxy-ovn-metrics" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176801 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="nbdb" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176812 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovnkube-controller" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176820 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="northd" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176828 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="sbdb" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176841 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="b975c363-e7a4-4d98-ad62-3dae613b3b3a" containerName="collect-profiles" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.176852 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerName="ovn-controller" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.178878 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239294 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-etc-openvswitch\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239348 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhnk2\" (UniqueName: \"kubernetes.io/projected/485d782b-f4ea-4a0f-8e25-66b50577addf-kube-api-access-lhnk2\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239368 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-slash\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239397 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-ovn\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239394 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239428 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-bin\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239448 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-script-lib\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239454 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-slash" (OuterVolumeSpecName: "host-slash") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239476 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-var-lib-openvswitch\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239510 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-systemd\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239523 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-node-log\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239537 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-env-overrides\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239553 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-netns\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239569 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-ovn-kubernetes\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239582 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-var-lib-cni-networks-ovn-kubernetes\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239598 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-netd\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239620 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-openvswitch\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239632 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-kubelet\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239651 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-systemd-units\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239669 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-config\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239698 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-log-socket\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239714 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/485d782b-f4ea-4a0f-8e25-66b50577addf-ovn-node-metrics-cert\") pod \"485d782b-f4ea-4a0f-8e25-66b50577addf\" (UID: \"485d782b-f4ea-4a0f-8e25-66b50577addf\") " Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239870 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-ovn\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239891 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-kubelet\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239912 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.239928 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-log-socket\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240000 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-slash\") pod \"ovnkube-node-gqgq7\" (UID: 
\"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240026 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-run-netns\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240046 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovnkube-config\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240066 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-systemd-units\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240080 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-run-ovn-kubernetes\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240108 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovnkube-script-lib\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240140 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-env-overrides\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240167 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovn-node-metrics-cert\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240189 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-node-log\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240212 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240209 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240224 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-node-log" (OuterVolumeSpecName: "node-log") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240251 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8kb\" (UniqueName: \"kubernetes.io/projected/27e6931e-c9b5-46cc-ba7f-d5f641466d44-kube-api-access-zf8kb\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240278 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-systemd\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240280 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240339 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240349 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-cni-bin\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240386 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-cni-netd\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240415 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-var-lib-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240432 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-etc-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240513 4723 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240541 4723 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-node-log\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240550 4723 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240559 4723 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240569 4723 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240577 4723 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-slash\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240619 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: 
"host-var-lib-cni-networks-ovn-kubernetes") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240639 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240657 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240674 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240693 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240711 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240734 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240756 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240759 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240778 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-log-socket" (OuterVolumeSpecName: "log-socket") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.240830 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.245347 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/485d782b-f4ea-4a0f-8e25-66b50577addf-kube-api-access-lhnk2" (OuterVolumeSpecName: "kube-api-access-lhnk2") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "kube-api-access-lhnk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.245564 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485d782b-f4ea-4a0f-8e25-66b50577addf-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.252384 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "485d782b-f4ea-4a0f-8e25-66b50577addf" (UID: "485d782b-f4ea-4a0f-8e25-66b50577addf"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341406 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8kb\" (UniqueName: \"kubernetes.io/projected/27e6931e-c9b5-46cc-ba7f-d5f641466d44-kube-api-access-zf8kb\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341453 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-systemd\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341475 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-cni-bin\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341493 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-cni-netd\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341512 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-var-lib-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341526 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-etc-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341550 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-ovn\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341567 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-kubelet\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341585 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-log-socket\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 
15:33:01.341599 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341609 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-cni-bin\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341674 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-kubelet\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341639 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-slash\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341706 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-etc-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341769 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-log-socket\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341677 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-slash\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341732 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-systemd\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341753 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-run-netns\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341610 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-var-lib-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341755 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-ovn\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341648 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-cni-netd\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341768 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341726 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-run-netns\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341876 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovnkube-config\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341899 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-systemd-units\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341913 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-run-ovn-kubernetes\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341935 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovnkube-script-lib\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341955 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-env-overrides\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341990 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovn-node-metrics-cert\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.341960 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-host-run-ovn-kubernetes\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342007 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-node-log\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342027 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342021 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-systemd-units\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342076 4723 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342093 4723 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-log-socket\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342103 4723 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/485d782b-f4ea-4a0f-8e25-66b50577addf-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342117 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhnk2\" (UniqueName: \"kubernetes.io/projected/485d782b-f4ea-4a0f-8e25-66b50577addf-kube-api-access-lhnk2\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342127 4723 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342137 4723 
reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342145 4723 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342153 4723 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342161 4723 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/485d782b-f4ea-4a0f-8e25-66b50577addf-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342169 4723 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342177 4723 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342186 4723 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342194 4723 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342204 4723 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/485d782b-f4ea-4a0f-8e25-66b50577addf-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342228 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-run-openvswitch\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342252 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/27e6931e-c9b5-46cc-ba7f-d5f641466d44-node-log\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342612 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-env-overrides\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc 
kubenswrapper[4723]: I1211 15:33:01.342664 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovnkube-config\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.342903 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovnkube-script-lib\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.345310 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/27e6931e-c9b5-46cc-ba7f-d5f641466d44-ovn-node-metrics-cert\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.360088 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf8kb\" (UniqueName: \"kubernetes.io/projected/27e6931e-c9b5-46cc-ba7f-d5f641466d44-kube-api-access-zf8kb\") pod \"ovnkube-node-gqgq7\" (UID: \"27e6931e-c9b5-46cc-ba7f-d5f641466d44\") " pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.491603 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.730577 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpc9f_8923b3a7-6d56-4fb6-b496-b718ea3a2071/kube-multus/0.log" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.730924 4723 generic.go:334] "Generic (PLEG): container finished" podID="8923b3a7-6d56-4fb6-b496-b718ea3a2071" containerID="476a1dbd276dda0f93f3b0bd4cd8e1e91de8030b85868a4be8be434f7ea0941a" exitCode=2 Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.731052 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpc9f" event={"ID":"8923b3a7-6d56-4fb6-b496-b718ea3a2071","Type":"ContainerDied","Data":"476a1dbd276dda0f93f3b0bd4cd8e1e91de8030b85868a4be8be434f7ea0941a"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.731431 4723 scope.go:117] "RemoveContainer" containerID="476a1dbd276dda0f93f3b0bd4cd8e1e91de8030b85868a4be8be434f7ea0941a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.733292 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"80d8d5353b1651374f1d7809c866cb31e784fab589c47fed2963a3f7ad471865"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.742348 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j6xw5_485d782b-f4ea-4a0f-8e25-66b50577addf/ovn-acl-logging/0.log" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.743167 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j6xw5_485d782b-f4ea-4a0f-8e25-66b50577addf/ovn-controller/0.log" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744644 4723 generic.go:334] "Generic (PLEG): container 
finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" exitCode=0 Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744668 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" exitCode=0 Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744676 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" exitCode=0 Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744683 4723 generic.go:334] "Generic (PLEG): container finished" podID="485d782b-f4ea-4a0f-8e25-66b50577addf" containerID="5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" exitCode=0 Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744704 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744730 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744741 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744752 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744761 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" event={"ID":"485d782b-f4ea-4a0f-8e25-66b50577addf","Type":"ContainerDied","Data":"4f4638fa33cfb3cdc5b4e5f635170d882bbc0f8e116b7bd486c29af5ae07e8b1"} Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744757 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j6xw5" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.744776 4723 scope.go:117] "RemoveContainer" containerID="45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.786930 4723 scope.go:117] "RemoveContainer" containerID="eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.791534 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j6xw5"] Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.797785 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j6xw5"] Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.807616 4723 scope.go:117] "RemoveContainer" containerID="8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.821152 4723 scope.go:117] "RemoveContainer" containerID="5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.835565 4723 scope.go:117] "RemoveContainer" containerID="52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.849147 4723 scope.go:117] "RemoveContainer" containerID="a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.862908 4723 scope.go:117] "RemoveContainer" containerID="3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.875196 4723 scope.go:117] "RemoveContainer" containerID="baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.889009 4723 scope.go:117] "RemoveContainer" containerID="f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.900520 4723 scope.go:117] "RemoveContainer" containerID="45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.901011 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": container with ID starting with 45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a not found: ID does not exist" containerID="45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.901055 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a"} err="failed to get container status \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": rpc error: code = NotFound desc = could not find container \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": container with ID starting with 45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.901087 4723 scope.go:117] "RemoveContainer" containerID="eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.901392 4723 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": container with ID starting with eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b not found: ID does not exist" containerID="eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.901417 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b"} err="failed to get container status \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": rpc error: code = NotFound desc = could not find container \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": container with ID starting with eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.901434 4723 scope.go:117] "RemoveContainer" containerID="8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.901702 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": container with ID starting with 8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf not found: ID does not exist" containerID="8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.901729 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf"} err="failed to get container status \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": rpc error: code = NotFound desc = could not find container \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": container with ID starting with 8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.901754 4723 scope.go:117] "RemoveContainer" containerID="5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.902064 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": container with ID starting with 5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4 not found: ID does not exist" containerID="5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902084 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4"} err="failed to get container status \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": rpc error: code = NotFound desc = could not find container \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": container with ID starting with 5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902101 4723 scope.go:117] "RemoveContainer" 
containerID="52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.902322 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": container with ID starting with 52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711 not found: ID does not exist" containerID="52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902346 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711"} err="failed to get container status \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": rpc error: code = NotFound desc = could not find container \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": container with ID starting with 52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902391 4723 scope.go:117] "RemoveContainer" containerID="a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.902629 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": container with ID starting with a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1 not found: ID does not exist" containerID="a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902651 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1"} err="failed to get container status \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": rpc error: code = NotFound desc = could not find container \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": container with ID starting with a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902668 4723 scope.go:117] "RemoveContainer" containerID="3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.902881 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": container with ID starting with 3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5 not found: ID does not exist" containerID="3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902902 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5"} err="failed to get container status \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": rpc error: code = NotFound desc = could not find container \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": container with ID starting with 
3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.902920 4723 scope.go:117] "RemoveContainer" containerID="baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.903371 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": container with ID starting with baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a not found: ID does not exist" containerID="baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.903398 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a"} err="failed to get container status \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": rpc error: code = NotFound desc = could not find container \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": container with ID starting with baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.903419 4723 scope.go:117] "RemoveContainer" containerID="f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c" Dec 11 15:33:01 crc kubenswrapper[4723]: E1211 15:33:01.903657 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": container with ID starting with f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c not found: ID does not exist" containerID="f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.903683 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c"} err="failed to get container status \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": rpc error: code = NotFound desc = could not find container \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": container with ID starting with f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.903703 4723 scope.go:117] "RemoveContainer" containerID="45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.904010 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a"} err="failed to get container status \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": rpc error: code = NotFound desc = could not find container \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": container with ID starting with 45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.904041 4723 scope.go:117] "RemoveContainer" containerID="eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" Dec 11 15:33:01 crc 
kubenswrapper[4723]: I1211 15:33:01.904381 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b"} err="failed to get container status \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": rpc error: code = NotFound desc = could not find container \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": container with ID starting with eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.904404 4723 scope.go:117] "RemoveContainer" containerID="8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.904803 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf"} err="failed to get container status \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": rpc error: code = NotFound desc = could not find container \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": container with ID starting with 8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.904830 4723 scope.go:117] "RemoveContainer" containerID="5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905160 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4"} err="failed to get container status \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": rpc error: code = NotFound desc = could not find container \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": container with ID starting with 5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905188 4723 scope.go:117] "RemoveContainer" containerID="52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905418 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711"} err="failed to get container status \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": rpc error: code = NotFound desc = could not find container \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": container with ID starting with 52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905436 4723 scope.go:117] "RemoveContainer" containerID="a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905634 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1"} err="failed to get container status \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": rpc error: code = NotFound desc = could not find container \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": container with ID 
starting with a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905667 4723 scope.go:117] "RemoveContainer" containerID="3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905880 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5"} err="failed to get container status \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": rpc error: code = NotFound desc = could not find container \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": container with ID starting with 3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.905896 4723 scope.go:117] "RemoveContainer" containerID="baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906128 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a"} err="failed to get container status \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": rpc error: code = NotFound desc = could not find container \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": container with ID starting with baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906148 4723 scope.go:117] "RemoveContainer" containerID="f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906369 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c"} err="failed to get container status \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": rpc error: code = NotFound desc = could not find container \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": container with ID starting with f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906389 4723 scope.go:117] "RemoveContainer" containerID="45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906605 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a"} err="failed to get container status \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": rpc error: code = NotFound desc = could not find container \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": container with ID starting with 45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906625 4723 scope.go:117] "RemoveContainer" containerID="eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906858 4723 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b"} err="failed to get container status \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": rpc error: code = NotFound desc = could not find container \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": container with ID starting with eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.906881 4723 scope.go:117] "RemoveContainer" containerID="8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.907116 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf"} err="failed to get container status \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": rpc error: code = NotFound desc = could not find container \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": container with ID starting with 8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.907148 4723 scope.go:117] "RemoveContainer" containerID="5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.907468 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4"} err="failed to get container status \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": rpc error: code = NotFound desc = could not find container \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": container with ID starting with 5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.907493 4723 scope.go:117] "RemoveContainer" containerID="52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.907792 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711"} err="failed to get container status \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": rpc error: code = NotFound desc = could not find container \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": container with ID starting with 52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.907816 4723 scope.go:117] "RemoveContainer" containerID="a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.908074 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1"} err="failed to get container status \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": rpc error: code = NotFound desc = could not find container \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": container with ID starting with a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1 not found: ID does not exist" Dec 
11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.908101 4723 scope.go:117] "RemoveContainer" containerID="3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.908395 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5"} err="failed to get container status \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": rpc error: code = NotFound desc = could not find container \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": container with ID starting with 3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.908422 4723 scope.go:117] "RemoveContainer" containerID="baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.908818 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a"} err="failed to get container status \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": rpc error: code = NotFound desc = could not find container \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": container with ID starting with baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.908858 4723 scope.go:117] "RemoveContainer" containerID="f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.909553 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c"} err="failed to get container status \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": rpc error: code = NotFound desc = could not find container \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": container with ID starting with f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.909610 4723 scope.go:117] "RemoveContainer" containerID="45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.910019 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a"} err="failed to get container status \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": rpc error: code = NotFound desc = could not find container \"45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a\": container with ID starting with 45fd296fc79ef66f818287b77a702d49ac2a3fe80716038115d2be7cc289f26a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.910059 4723 scope.go:117] "RemoveContainer" containerID="eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.910399 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b"} err="failed to get container status 
\"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": rpc error: code = NotFound desc = could not find container \"eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b\": container with ID starting with eee63aaade0a3561010e8899ef5e59adffd131ba2c3e88919147b1cc5a34723b not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.910426 4723 scope.go:117] "RemoveContainer" containerID="8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.910731 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf"} err="failed to get container status \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": rpc error: code = NotFound desc = could not find container \"8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf\": container with ID starting with 8cd684d34ab13d4cdd2aea805a24ef416779a8c126f6602fd9fa07db2550b0cf not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.910784 4723 scope.go:117] "RemoveContainer" containerID="5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.911210 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4"} err="failed to get container status \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": rpc error: code = NotFound desc = could not find container \"5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4\": container with ID starting with 5802f64b396143110249762d9ebbf209615b837e7bf7a3ee3a30d61b86e2f0c4 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.911236 4723 scope.go:117] "RemoveContainer" containerID="52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.911614 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711"} err="failed to get container status \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": rpc error: code = NotFound desc = could not find container \"52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711\": container with ID starting with 52088ab9f4b826c1c86a185852e67e2f647581489539814d7e07e98c86f23711 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.911635 4723 scope.go:117] "RemoveContainer" containerID="a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.911887 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1"} err="failed to get container status \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": rpc error: code = NotFound desc = could not find container \"a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1\": container with ID starting with a9bb7b452f0cbd21631a4fd058d76bddc40aca5537fbeb0807c08dea9a008ac1 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.911919 4723 scope.go:117] "RemoveContainer" 
containerID="3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.912245 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5"} err="failed to get container status \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": rpc error: code = NotFound desc = could not find container \"3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5\": container with ID starting with 3f5f91b4ce4a58b0dee765b9930262193fe20cf8aa6176853667eaa260406ba5 not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.912263 4723 scope.go:117] "RemoveContainer" containerID="baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.912496 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a"} err="failed to get container status \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": rpc error: code = NotFound desc = could not find container \"baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a\": container with ID starting with baba9d256e5c15ccc2b19582a280107910f200e695b5d48218d822d8ecf8205a not found: ID does not exist" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.912522 4723 scope.go:117] "RemoveContainer" containerID="f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c" Dec 11 15:33:01 crc kubenswrapper[4723]: I1211 15:33:01.912782 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c"} err="failed to get container status \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": rpc error: code = NotFound desc = could not find container \"f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c\": container with ID starting with f356ec081910b9e29214cc7ef2c2a62637e7b14dd6ded65c040a77dac269c68c not found: ID does not exist" Dec 11 15:33:03 crc kubenswrapper[4723]: I1211 15:33:03.555167 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="485d782b-f4ea-4a0f-8e25-66b50577addf" path="/var/lib/kubelet/pods/485d782b-f4ea-4a0f-8e25-66b50577addf/volumes" Dec 11 15:33:03 crc kubenswrapper[4723]: I1211 15:33:03.759368 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpc9f_8923b3a7-6d56-4fb6-b496-b718ea3a2071/kube-multus/0.log" Dec 11 15:33:03 crc kubenswrapper[4723]: I1211 15:33:03.759459 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpc9f" event={"ID":"8923b3a7-6d56-4fb6-b496-b718ea3a2071","Type":"ContainerStarted","Data":"32a8d63d3660f5e5107fb062fe4db2366d7e8d5babe72a433a9451ca0215c0f9"} Dec 11 15:33:03 crc kubenswrapper[4723]: I1211 15:33:03.762074 4723 generic.go:334] "Generic (PLEG): container finished" podID="27e6931e-c9b5-46cc-ba7f-d5f641466d44" containerID="3675361580aa9cbadf7596edbeef7fce2d21390adce047515d230c69e4469957" exitCode=0 Dec 11 15:33:03 crc kubenswrapper[4723]: I1211 15:33:03.762171 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" 
event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerDied","Data":"3675361580aa9cbadf7596edbeef7fce2d21390adce047515d230c69e4469957"} Dec 11 15:33:04 crc kubenswrapper[4723]: I1211 15:33:04.768566 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"ab6529861377ebd3a0207c46fa9f4210b20e7d1d3390b901888a5f94904f7746"} Dec 11 15:33:04 crc kubenswrapper[4723]: I1211 15:33:04.768836 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"8e52bf567b245e7e3b5305214aa3f0e006ca132742369e9f92cb67f769713454"} Dec 11 15:33:05 crc kubenswrapper[4723]: I1211 15:33:05.776858 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"c9d13b8a788ed636fcf6a710889e58cd1a584c3f9255649ad55d1583a06dfca5"} Dec 11 15:33:05 crc kubenswrapper[4723]: I1211 15:33:05.777148 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"bc4fa4cf634bd590e0d1904549ca9ccc339b05b7a9c0dce134e6879c5b84f2e8"} Dec 11 15:33:06 crc kubenswrapper[4723]: I1211 15:33:06.785664 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"8715bd6c075a749bc1b901c5faa39c226ab67e35be497634e17080d8fe4199a6"} Dec 11 15:33:06 crc kubenswrapper[4723]: I1211 15:33:06.786054 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"9693f2af8a390c130bded5e3eb7bb1e2969e7fbc41851588ff115906b3e2ed9c"} Dec 11 15:33:08 crc kubenswrapper[4723]: I1211 15:33:08.799649 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"ecfc6d54b844ab93d688890f5093590df82c749bfe32f8a76802a2986c7d9262"} Dec 11 15:33:10 crc kubenswrapper[4723]: I1211 15:33:10.023228 4723 scope.go:117] "RemoveContainer" containerID="c0b862e3701beca77ff1258a7ba9788f7c471ef9ce2b2eb40f263c889268e28c" Dec 11 15:33:10 crc kubenswrapper[4723]: I1211 15:33:10.037147 4723 scope.go:117] "RemoveContainer" containerID="4cc0e579e4ffccf56f376d1a0a58377bc37aa6202056f499de3c4c6186b9d623" Dec 11 15:33:11 crc kubenswrapper[4723]: I1211 15:33:11.817336 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" event={"ID":"27e6931e-c9b5-46cc-ba7f-d5f641466d44","Type":"ContainerStarted","Data":"f9d1c593b69294518e45ea2ec3757f64d978dd4bee13f88305058c01e9254e84"} Dec 11 15:33:11 crc kubenswrapper[4723]: I1211 15:33:11.817826 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:11 crc kubenswrapper[4723]: I1211 15:33:11.817843 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:11 crc kubenswrapper[4723]: I1211 15:33:11.819356 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:11 crc kubenswrapper[4723]: I1211 15:33:11.851833 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:11 crc kubenswrapper[4723]: I1211 15:33:11.852163 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:33:11 crc kubenswrapper[4723]: I1211 15:33:11.853436 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" podStartSLOduration=10.853418379 podStartE2EDuration="10.853418379s" podCreationTimestamp="2025-12-11 15:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:33:11.848707123 +0000 UTC m=+602.622940558" watchObservedRunningTime="2025-12-11 15:33:11.853418379 +0000 UTC m=+602.627651814" Dec 11 15:33:13 crc kubenswrapper[4723]: I1211 15:33:13.744904 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:33:13 crc kubenswrapper[4723]: I1211 15:33:13.745366 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:33:13 crc kubenswrapper[4723]: I1211 15:33:13.745415 4723 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:33:13 crc kubenswrapper[4723]: I1211 15:33:13.745962 4723 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5fa3fe424c4893f1bc2976d405633dbdfbe341b0a6fe25c362bac639bab753ca"} pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 15:33:13 crc kubenswrapper[4723]: I1211 15:33:13.746035 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" containerID="cri-o://5fa3fe424c4893f1bc2976d405633dbdfbe341b0a6fe25c362bac639bab753ca" gracePeriod=600 Dec 11 15:33:14 crc kubenswrapper[4723]: I1211 15:33:14.839073 4723 generic.go:334] "Generic (PLEG): container finished" podID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerID="5fa3fe424c4893f1bc2976d405633dbdfbe341b0a6fe25c362bac639bab753ca" exitCode=0 Dec 11 15:33:14 crc kubenswrapper[4723]: I1211 15:33:14.839124 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerDied","Data":"5fa3fe424c4893f1bc2976d405633dbdfbe341b0a6fe25c362bac639bab753ca"} Dec 11 15:33:14 crc kubenswrapper[4723]: I1211 15:33:14.839850 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"ee76f68a1077eaff7e44d5582a42e9fd3b36da3d7d13741712c5ce62619f8d4e"} Dec 11 15:33:14 crc kubenswrapper[4723]: I1211 15:33:14.839876 4723 scope.go:117] "RemoveContainer" containerID="13f30ec6b4ba49b633e30eb57b2722a37e416c3766af4898268c21192f72194a" Dec 11 15:33:31 crc kubenswrapper[4723]: I1211 15:33:31.514763 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gqgq7" Dec 11 15:34:01 crc kubenswrapper[4723]: I1211 15:34:01.696990 4723 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 15:34:13 crc kubenswrapper[4723]: I1211 15:34:13.712672 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lx8rd"] Dec 11 15:34:13 crc kubenswrapper[4723]: I1211 15:34:13.714205 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lx8rd" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="registry-server" containerID="cri-o://05a0ddf6e0bfbd8eb651d560f47c7da3f69aec4d386b94998034724ffd35769c" gracePeriod=30 Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.145588 4723 generic.go:334] "Generic (PLEG): container finished" podID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerID="05a0ddf6e0bfbd8eb651d560f47c7da3f69aec4d386b94998034724ffd35769c" exitCode=0 Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.145635 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lx8rd" event={"ID":"e59bd82f-379d-4813-82be-6fb411f1ebec","Type":"ContainerDied","Data":"05a0ddf6e0bfbd8eb651d560f47c7da3f69aec4d386b94998034724ffd35769c"} Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.537569 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.588336 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-utilities\") pod \"e59bd82f-379d-4813-82be-6fb411f1ebec\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.588412 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-catalog-content\") pod \"e59bd82f-379d-4813-82be-6fb411f1ebec\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.588490 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfrt7\" (UniqueName: \"kubernetes.io/projected/e59bd82f-379d-4813-82be-6fb411f1ebec-kube-api-access-kfrt7\") pod \"e59bd82f-379d-4813-82be-6fb411f1ebec\" (UID: \"e59bd82f-379d-4813-82be-6fb411f1ebec\") " Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.589371 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-utilities" (OuterVolumeSpecName: "utilities") pod "e59bd82f-379d-4813-82be-6fb411f1ebec" (UID: "e59bd82f-379d-4813-82be-6fb411f1ebec"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.593626 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e59bd82f-379d-4813-82be-6fb411f1ebec-kube-api-access-kfrt7" (OuterVolumeSpecName: "kube-api-access-kfrt7") pod "e59bd82f-379d-4813-82be-6fb411f1ebec" (UID: "e59bd82f-379d-4813-82be-6fb411f1ebec"). InnerVolumeSpecName "kube-api-access-kfrt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.611024 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e59bd82f-379d-4813-82be-6fb411f1ebec" (UID: "e59bd82f-379d-4813-82be-6fb411f1ebec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.690060 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.690107 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e59bd82f-379d-4813-82be-6fb411f1ebec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:14 crc kubenswrapper[4723]: I1211 15:34:14.690122 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfrt7\" (UniqueName: \"kubernetes.io/projected/e59bd82f-379d-4813-82be-6fb411f1ebec-kube-api-access-kfrt7\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.151901 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lx8rd" event={"ID":"e59bd82f-379d-4813-82be-6fb411f1ebec","Type":"ContainerDied","Data":"20c33e89c0c7a1803937a74aba0e597d02c584789bb6bffa4c9854e67520d84e"} Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.151952 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lx8rd" Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.151964 4723 scope.go:117] "RemoveContainer" containerID="05a0ddf6e0bfbd8eb651d560f47c7da3f69aec4d386b94998034724ffd35769c" Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.166256 4723 scope.go:117] "RemoveContainer" containerID="2f12350e213bec9f78c23b34d7cd13813fd3f459e92e087dbf49eda3cf27d73d" Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.182117 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lx8rd"] Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.186671 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lx8rd"] Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.196671 4723 scope.go:117] "RemoveContainer" containerID="996b3b208295912ce21aef3f091f9a2cbebb041ced90740e9f38299b294dc0c5" Dec 11 15:34:15 crc kubenswrapper[4723]: I1211 15:34:15.554396 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" path="/var/lib/kubelet/pods/e59bd82f-379d-4813-82be-6fb411f1ebec/volumes" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.425476 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9"] Dec 11 15:34:17 crc kubenswrapper[4723]: E1211 15:34:17.425970 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="registry-server" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.426000 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="registry-server" Dec 11 15:34:17 crc kubenswrapper[4723]: E1211 15:34:17.426024 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="extract-content" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.426030 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="extract-content" Dec 11 15:34:17 crc kubenswrapper[4723]: E1211 15:34:17.426039 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="extract-utilities" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.426046 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="extract-utilities" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.426125 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="e59bd82f-379d-4813-82be-6fb411f1ebec" containerName="registry-server" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.426755 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.428547 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.437384 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9"] Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.522870 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.522925 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.523002 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knq7w\" (UniqueName: \"kubernetes.io/projected/f441b93b-02ba-484f-90a6-c0b1eb50fade-kube-api-access-knq7w\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.624165 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knq7w\" (UniqueName: \"kubernetes.io/projected/f441b93b-02ba-484f-90a6-c0b1eb50fade-kube-api-access-knq7w\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.624244 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.624274 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.624732 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.625209 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.643703 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knq7w\" (UniqueName: \"kubernetes.io/projected/f441b93b-02ba-484f-90a6-c0b1eb50fade-kube-api-access-knq7w\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.741243 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:17 crc kubenswrapper[4723]: I1211 15:34:17.911490 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9"] Dec 11 15:34:18 crc kubenswrapper[4723]: I1211 15:34:18.167364 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" event={"ID":"f441b93b-02ba-484f-90a6-c0b1eb50fade","Type":"ContainerStarted","Data":"445d3f8a9935f72cf1c6e9674cdcc4f77721ef0ee6e7ccb0d3b75a3a4706407a"} Dec 11 15:34:18 crc kubenswrapper[4723]: I1211 15:34:18.167406 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" event={"ID":"f441b93b-02ba-484f-90a6-c0b1eb50fade","Type":"ContainerStarted","Data":"0d759d09fdde855840d515dbbe3412eafe502f599190a8455053355663f68356"} Dec 11 15:34:19 crc kubenswrapper[4723]: I1211 15:34:19.173797 4723 generic.go:334] "Generic (PLEG): container finished" podID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerID="445d3f8a9935f72cf1c6e9674cdcc4f77721ef0ee6e7ccb0d3b75a3a4706407a" exitCode=0 Dec 11 15:34:19 crc kubenswrapper[4723]: I1211 15:34:19.174379 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" event={"ID":"f441b93b-02ba-484f-90a6-c0b1eb50fade","Type":"ContainerDied","Data":"445d3f8a9935f72cf1c6e9674cdcc4f77721ef0ee6e7ccb0d3b75a3a4706407a"} Dec 11 15:34:19 crc kubenswrapper[4723]: I1211 15:34:19.177437 4723 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.392420 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x7d5c"] Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.394677 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.407344 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7d5c"] Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.564448 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqncz\" (UniqueName: \"kubernetes.io/projected/23ba2517-d363-417e-8244-ae29e5da10ce-kube-api-access-sqncz\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.564783 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-utilities\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.564842 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-catalog-content\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.666019 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-catalog-content\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.666123 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqncz\" (UniqueName: \"kubernetes.io/projected/23ba2517-d363-417e-8244-ae29e5da10ce-kube-api-access-sqncz\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.666168 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-utilities\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.666614 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-catalog-content\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.666650 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-utilities\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.685303 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-sqncz\" (UniqueName: \"kubernetes.io/projected/23ba2517-d363-417e-8244-ae29e5da10ce-kube-api-access-sqncz\") pod \"redhat-operators-x7d5c\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.751465 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:20 crc kubenswrapper[4723]: I1211 15:34:20.930754 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x7d5c"] Dec 11 15:34:21 crc kubenswrapper[4723]: I1211 15:34:21.187365 4723 generic.go:334] "Generic (PLEG): container finished" podID="23ba2517-d363-417e-8244-ae29e5da10ce" containerID="ccc846b7a7a6a1d2f3f4d2dff9b5fcbfdc4e3cc3be1ecb1bc02c7b9ac15946ef" exitCode=0 Dec 11 15:34:21 crc kubenswrapper[4723]: I1211 15:34:21.187500 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7d5c" event={"ID":"23ba2517-d363-417e-8244-ae29e5da10ce","Type":"ContainerDied","Data":"ccc846b7a7a6a1d2f3f4d2dff9b5fcbfdc4e3cc3be1ecb1bc02c7b9ac15946ef"} Dec 11 15:34:21 crc kubenswrapper[4723]: I1211 15:34:21.187747 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7d5c" event={"ID":"23ba2517-d363-417e-8244-ae29e5da10ce","Type":"ContainerStarted","Data":"30d5ed6313641efa4443668ba11830f5e5649b8d69493e66d5fb809f59a3704b"} Dec 11 15:34:21 crc kubenswrapper[4723]: I1211 15:34:21.190314 4723 generic.go:334] "Generic (PLEG): container finished" podID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerID="0db60a5ea89fbc8e014d3f857996ebdf562457d8c2a5f401e925b2a03adefb81" exitCode=0 Dec 11 15:34:21 crc kubenswrapper[4723]: I1211 15:34:21.190346 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" event={"ID":"f441b93b-02ba-484f-90a6-c0b1eb50fade","Type":"ContainerDied","Data":"0db60a5ea89fbc8e014d3f857996ebdf562457d8c2a5f401e925b2a03adefb81"} Dec 11 15:34:22 crc kubenswrapper[4723]: I1211 15:34:22.204016 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7d5c" event={"ID":"23ba2517-d363-417e-8244-ae29e5da10ce","Type":"ContainerStarted","Data":"efa87e30296c4b20212ecd38c68151ecb88d46e420acd523f0c92b76a036a1ed"} Dec 11 15:34:22 crc kubenswrapper[4723]: I1211 15:34:22.207235 4723 generic.go:334] "Generic (PLEG): container finished" podID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerID="62d1b2a052a65b83547aebbf23fddf8a2de102fe83a81f96e0f598903a96f9f4" exitCode=0 Dec 11 15:34:22 crc kubenswrapper[4723]: I1211 15:34:22.207288 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" event={"ID":"f441b93b-02ba-484f-90a6-c0b1eb50fade","Type":"ContainerDied","Data":"62d1b2a052a65b83547aebbf23fddf8a2de102fe83a81f96e0f598903a96f9f4"} Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.215115 4723 generic.go:334] "Generic (PLEG): container finished" podID="23ba2517-d363-417e-8244-ae29e5da10ce" containerID="efa87e30296c4b20212ecd38c68151ecb88d46e420acd523f0c92b76a036a1ed" exitCode=0 Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.215235 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7d5c" 
event={"ID":"23ba2517-d363-417e-8244-ae29e5da10ce","Type":"ContainerDied","Data":"efa87e30296c4b20212ecd38c68151ecb88d46e420acd523f0c92b76a036a1ed"} Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.452287 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.496882 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knq7w\" (UniqueName: \"kubernetes.io/projected/f441b93b-02ba-484f-90a6-c0b1eb50fade-kube-api-access-knq7w\") pod \"f441b93b-02ba-484f-90a6-c0b1eb50fade\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.497372 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-bundle\") pod \"f441b93b-02ba-484f-90a6-c0b1eb50fade\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.497416 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-util\") pod \"f441b93b-02ba-484f-90a6-c0b1eb50fade\" (UID: \"f441b93b-02ba-484f-90a6-c0b1eb50fade\") " Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.499838 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-bundle" (OuterVolumeSpecName: "bundle") pod "f441b93b-02ba-484f-90a6-c0b1eb50fade" (UID: "f441b93b-02ba-484f-90a6-c0b1eb50fade"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.503466 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f441b93b-02ba-484f-90a6-c0b1eb50fade-kube-api-access-knq7w" (OuterVolumeSpecName: "kube-api-access-knq7w") pod "f441b93b-02ba-484f-90a6-c0b1eb50fade" (UID: "f441b93b-02ba-484f-90a6-c0b1eb50fade"). InnerVolumeSpecName "kube-api-access-knq7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.511678 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-util" (OuterVolumeSpecName: "util") pod "f441b93b-02ba-484f-90a6-c0b1eb50fade" (UID: "f441b93b-02ba-484f-90a6-c0b1eb50fade"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.598798 4723 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.599216 4723 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f441b93b-02ba-484f-90a6-c0b1eb50fade-util\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:23 crc kubenswrapper[4723]: I1211 15:34:23.599474 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knq7w\" (UniqueName: \"kubernetes.io/projected/f441b93b-02ba-484f-90a6-c0b1eb50fade-kube-api-access-knq7w\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.222398 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.222373 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9" event={"ID":"f441b93b-02ba-484f-90a6-c0b1eb50fade","Type":"ContainerDied","Data":"0d759d09fdde855840d515dbbe3412eafe502f599190a8455053355663f68356"} Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.222658 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d759d09fdde855840d515dbbe3412eafe502f599190a8455053355663f68356" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.224529 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7d5c" event={"ID":"23ba2517-d363-417e-8244-ae29e5da10ce","Type":"ContainerStarted","Data":"d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f"} Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.243247 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x7d5c" podStartSLOduration=1.682775956 podStartE2EDuration="4.243228982s" podCreationTimestamp="2025-12-11 15:34:20 +0000 UTC" firstStartedPulling="2025-12-11 15:34:21.189180141 +0000 UTC m=+671.963413576" lastFinishedPulling="2025-12-11 15:34:23.749633157 +0000 UTC m=+674.523866602" observedRunningTime="2025-12-11 15:34:24.242444391 +0000 UTC m=+675.016677826" watchObservedRunningTime="2025-12-11 15:34:24.243228982 +0000 UTC m=+675.017462437" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.428949 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz"] Dec 11 15:34:24 crc kubenswrapper[4723]: E1211 15:34:24.429550 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerName="extract" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.429563 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerName="extract" Dec 11 15:34:24 crc kubenswrapper[4723]: E1211 15:34:24.429574 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerName="pull" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.429580 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f441b93b-02ba-484f-90a6-c0b1eb50fade" 
containerName="pull" Dec 11 15:34:24 crc kubenswrapper[4723]: E1211 15:34:24.429597 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerName="util" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.429603 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerName="util" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.429706 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="f441b93b-02ba-484f-90a6-c0b1eb50fade" containerName="extract" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.430519 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.432858 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.442365 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz"] Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.511001 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.511089 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.511124 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cshzv\" (UniqueName: \"kubernetes.io/projected/90635432-311d-4b2c-8d71-bbd56f653a41-kube-api-access-cshzv\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.613416 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.612482 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" 
Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.613589 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.614105 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.614211 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cshzv\" (UniqueName: \"kubernetes.io/projected/90635432-311d-4b2c-8d71-bbd56f653a41-kube-api-access-cshzv\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.633426 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cshzv\" (UniqueName: \"kubernetes.io/projected/90635432-311d-4b2c-8d71-bbd56f653a41-kube-api-access-cshzv\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:24 crc kubenswrapper[4723]: I1211 15:34:24.745492 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:25 crc kubenswrapper[4723]: I1211 15:34:25.143739 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz"] Dec 11 15:34:25 crc kubenswrapper[4723]: I1211 15:34:25.230637 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" event={"ID":"90635432-311d-4b2c-8d71-bbd56f653a41","Type":"ContainerStarted","Data":"f052b69f51a2fd452bdc88f3d78ca888ac2275925a1bbd322d46419ef8c1f54a"} Dec 11 15:34:26 crc kubenswrapper[4723]: I1211 15:34:26.237551 4723 generic.go:334] "Generic (PLEG): container finished" podID="90635432-311d-4b2c-8d71-bbd56f653a41" containerID="203d005e0d8bb4e6edc0b7397a81b4cf90f5266c164d30b46591cfc79c39a015" exitCode=0 Dec 11 15:34:26 crc kubenswrapper[4723]: I1211 15:34:26.237610 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" event={"ID":"90635432-311d-4b2c-8d71-bbd56f653a41","Type":"ContainerDied","Data":"203d005e0d8bb4e6edc0b7397a81b4cf90f5266c164d30b46591cfc79c39a015"} Dec 11 15:34:28 crc kubenswrapper[4723]: I1211 15:34:28.998361 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x7fch"] Dec 11 15:34:28 crc kubenswrapper[4723]: I1211 15:34:28.999629 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.014850 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x7fch"] Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.187895 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-catalog-content\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.187959 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g98h7\" (UniqueName: \"kubernetes.io/projected/07eead0e-ad88-4884-ad27-2ace3526fc3e-kube-api-access-g98h7\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.187990 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-utilities\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.259721 4723 generic.go:334] "Generic (PLEG): container finished" podID="90635432-311d-4b2c-8d71-bbd56f653a41" containerID="2c04a9fb668b86bf64cb2a0be8959a48f7a737aebe0773a89faf6db8de990a48" exitCode=0 Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.259773 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" 
event={"ID":"90635432-311d-4b2c-8d71-bbd56f653a41","Type":"ContainerDied","Data":"2c04a9fb668b86bf64cb2a0be8959a48f7a737aebe0773a89faf6db8de990a48"} Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.289071 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g98h7\" (UniqueName: \"kubernetes.io/projected/07eead0e-ad88-4884-ad27-2ace3526fc3e-kube-api-access-g98h7\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.289126 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-utilities\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.289632 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-utilities\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.289650 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-catalog-content\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.289263 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-catalog-content\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.323228 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g98h7\" (UniqueName: \"kubernetes.io/projected/07eead0e-ad88-4884-ad27-2ace3526fc3e-kube-api-access-g98h7\") pod \"certified-operators-x7fch\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:29 crc kubenswrapper[4723]: I1211 15:34:29.360175 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.044265 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x7fch"] Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.300375 4723 generic.go:334] "Generic (PLEG): container finished" podID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerID="52c36f8c849ca4677393fe82b8fc73a38a6fb224393dbae1ed5e12dd2ac94194" exitCode=0 Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.300463 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x7fch" event={"ID":"07eead0e-ad88-4884-ad27-2ace3526fc3e","Type":"ContainerDied","Data":"52c36f8c849ca4677393fe82b8fc73a38a6fb224393dbae1ed5e12dd2ac94194"} Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.300489 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x7fch" event={"ID":"07eead0e-ad88-4884-ad27-2ace3526fc3e","Type":"ContainerStarted","Data":"1cd1b048a98a2c86f44ac423dc7c1e64cd683b1aa642c0c698de2c6f38f06dd5"} Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.320065 4723 generic.go:334] "Generic (PLEG): container finished" podID="90635432-311d-4b2c-8d71-bbd56f653a41" containerID="a5973d718115407bf08eb21ced4de98ee7be42bd13e15c2da07c8e6caf6a75d9" exitCode=0 Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.320112 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" event={"ID":"90635432-311d-4b2c-8d71-bbd56f653a41","Type":"ContainerDied","Data":"a5973d718115407bf08eb21ced4de98ee7be42bd13e15c2da07c8e6caf6a75d9"} Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.752002 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.752060 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.892549 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z"] Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.893537 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:30 crc kubenswrapper[4723]: I1211 15:34:30.914229 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z"] Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.013540 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltmsk\" (UniqueName: \"kubernetes.io/projected/c62e47fd-6638-4d88-a884-a0b2bef6b59f-kube-api-access-ltmsk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.013597 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.013630 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.115030 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltmsk\" (UniqueName: \"kubernetes.io/projected/c62e47fd-6638-4d88-a884-a0b2bef6b59f-kube-api-access-ltmsk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.115074 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.115094 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.115497 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " 
pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.115699 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.151621 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltmsk\" (UniqueName: \"kubernetes.io/projected/c62e47fd-6638-4d88-a884-a0b2bef6b59f-kube-api-access-ltmsk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.213427 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.863618 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x7d5c" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="registry-server" probeResult="failure" output=< Dec 11 15:34:31 crc kubenswrapper[4723]: timeout: failed to connect service ":50051" within 1s Dec 11 15:34:31 crc kubenswrapper[4723]: > Dec 11 15:34:31 crc kubenswrapper[4723]: I1211 15:34:31.929914 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z"] Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.136818 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.223136 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-bundle\") pod \"90635432-311d-4b2c-8d71-bbd56f653a41\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.223192 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cshzv\" (UniqueName: \"kubernetes.io/projected/90635432-311d-4b2c-8d71-bbd56f653a41-kube-api-access-cshzv\") pod \"90635432-311d-4b2c-8d71-bbd56f653a41\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.223223 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-util\") pod \"90635432-311d-4b2c-8d71-bbd56f653a41\" (UID: \"90635432-311d-4b2c-8d71-bbd56f653a41\") " Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.230159 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-bundle" (OuterVolumeSpecName: "bundle") pod "90635432-311d-4b2c-8d71-bbd56f653a41" (UID: "90635432-311d-4b2c-8d71-bbd56f653a41"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.249229 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-util" (OuterVolumeSpecName: "util") pod "90635432-311d-4b2c-8d71-bbd56f653a41" (UID: "90635432-311d-4b2c-8d71-bbd56f653a41"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.269134 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90635432-311d-4b2c-8d71-bbd56f653a41-kube-api-access-cshzv" (OuterVolumeSpecName: "kube-api-access-cshzv") pod "90635432-311d-4b2c-8d71-bbd56f653a41" (UID: "90635432-311d-4b2c-8d71-bbd56f653a41"). InnerVolumeSpecName "kube-api-access-cshzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.324298 4723 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-util\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.324820 4723 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90635432-311d-4b2c-8d71-bbd56f653a41-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.324834 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cshzv\" (UniqueName: \"kubernetes.io/projected/90635432-311d-4b2c-8d71-bbd56f653a41-kube-api-access-cshzv\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.510462 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" event={"ID":"c62e47fd-6638-4d88-a884-a0b2bef6b59f","Type":"ContainerStarted","Data":"04339d6ed8cdb9d822ec04aac2a664b3f3833f81013b26ddd5898ff94a818005"} Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.510512 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" event={"ID":"c62e47fd-6638-4d88-a884-a0b2bef6b59f","Type":"ContainerStarted","Data":"0ee68e0513190e44209ad56b5925019bd8165240c0b4b8b4c8e682e4bf10418c"} Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.513033 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" event={"ID":"90635432-311d-4b2c-8d71-bbd56f653a41","Type":"ContainerDied","Data":"f052b69f51a2fd452bdc88f3d78ca888ac2275925a1bbd322d46419ef8c1f54a"} Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.513057 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f052b69f51a2fd452bdc88f3d78ca888ac2275925a1bbd322d46419ef8c1f54a" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.513114 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz" Dec 11 15:34:32 crc kubenswrapper[4723]: I1211 15:34:32.515631 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x7fch" event={"ID":"07eead0e-ad88-4884-ad27-2ace3526fc3e","Type":"ContainerStarted","Data":"519ff83e025fa30e568aac037f47890cbf7cf50cddb103aa6880032327ecb338"} Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.287119 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg"] Dec 11 15:34:34 crc kubenswrapper[4723]: E1211 15:34:34.287323 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90635432-311d-4b2c-8d71-bbd56f653a41" containerName="pull" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.287334 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="90635432-311d-4b2c-8d71-bbd56f653a41" containerName="pull" Dec 11 15:34:34 crc kubenswrapper[4723]: E1211 15:34:34.287344 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90635432-311d-4b2c-8d71-bbd56f653a41" containerName="util" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.287350 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="90635432-311d-4b2c-8d71-bbd56f653a41" containerName="util" Dec 11 15:34:34 crc kubenswrapper[4723]: E1211 15:34:34.287360 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90635432-311d-4b2c-8d71-bbd56f653a41" containerName="extract" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.287367 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="90635432-311d-4b2c-8d71-bbd56f653a41" containerName="extract" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.287458 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="90635432-311d-4b2c-8d71-bbd56f653a41" containerName="extract" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.287821 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.291415 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.291479 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-cz7l7" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.291436 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.314814 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.391632 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd6z2\" (UniqueName: \"kubernetes.io/projected/8d747017-ba68-4bba-932d-30c1b7f21c3e-kube-api-access-pd6z2\") pod \"obo-prometheus-operator-668cf9dfbb-vqsrg\" (UID: \"8d747017-ba68-4bba-932d-30c1b7f21c3e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.441338 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.441931 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.444826 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-sjvzp" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.444866 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.455912 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.482784 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.483692 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.495995 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd6z2\" (UniqueName: \"kubernetes.io/projected/8d747017-ba68-4bba-932d-30c1b7f21c3e-kube-api-access-pd6z2\") pod \"obo-prometheus-operator-668cf9dfbb-vqsrg\" (UID: \"8d747017-ba68-4bba-932d-30c1b7f21c3e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.496085 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2dbda7da-f5c3-43c2-92a7-397c48293f0b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl\" (UID: \"2dbda7da-f5c3-43c2-92a7-397c48293f0b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.496133 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2dbda7da-f5c3-43c2-92a7-397c48293f0b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl\" (UID: \"2dbda7da-f5c3-43c2-92a7-397c48293f0b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.507569 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.523904 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd6z2\" (UniqueName: \"kubernetes.io/projected/8d747017-ba68-4bba-932d-30c1b7f21c3e-kube-api-access-pd6z2\") pod \"obo-prometheus-operator-668cf9dfbb-vqsrg\" (UID: \"8d747017-ba68-4bba-932d-30c1b7f21c3e\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.597711 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2dbda7da-f5c3-43c2-92a7-397c48293f0b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl\" (UID: \"2dbda7da-f5c3-43c2-92a7-397c48293f0b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.597778 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/248a8e31-62c3-4bc6-81d7-8d603174184f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-sskx2\" (UID: \"248a8e31-62c3-4bc6-81d7-8d603174184f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.597799 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2dbda7da-f5c3-43c2-92a7-397c48293f0b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl\" (UID: \"2dbda7da-f5c3-43c2-92a7-397c48293f0b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 
15:34:34.598009 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/248a8e31-62c3-4bc6-81d7-8d603174184f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-sskx2\" (UID: \"248a8e31-62c3-4bc6-81d7-8d603174184f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.600792 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2dbda7da-f5c3-43c2-92a7-397c48293f0b-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl\" (UID: \"2dbda7da-f5c3-43c2-92a7-397c48293f0b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.600866 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2dbda7da-f5c3-43c2-92a7-397c48293f0b-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl\" (UID: \"2dbda7da-f5c3-43c2-92a7-397c48293f0b\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.602670 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.636240 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-c4b5b"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.637032 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.641032 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.641136 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-2zqmh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.647485 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-c4b5b"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.699674 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzdwz\" (UniqueName: \"kubernetes.io/projected/28e7bf38-168c-4a52-8e29-5036b9adc3ab-kube-api-access-jzdwz\") pod \"observability-operator-d8bb48f5d-c4b5b\" (UID: \"28e7bf38-168c-4a52-8e29-5036b9adc3ab\") " pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.699748 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/28e7bf38-168c-4a52-8e29-5036b9adc3ab-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-c4b5b\" (UID: \"28e7bf38-168c-4a52-8e29-5036b9adc3ab\") " pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.699790 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/248a8e31-62c3-4bc6-81d7-8d603174184f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-sskx2\" (UID: \"248a8e31-62c3-4bc6-81d7-8d603174184f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.699858 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/248a8e31-62c3-4bc6-81d7-8d603174184f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-sskx2\" (UID: \"248a8e31-62c3-4bc6-81d7-8d603174184f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.703148 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/248a8e31-62c3-4bc6-81d7-8d603174184f-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-sskx2\" (UID: \"248a8e31-62c3-4bc6-81d7-8d603174184f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.705175 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/248a8e31-62c3-4bc6-81d7-8d603174184f-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-dd5f85999-sskx2\" (UID: \"248a8e31-62c3-4bc6-81d7-8d603174184f\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.724568 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7j5zh"] Dec 11 15:34:34 crc 
kubenswrapper[4723]: I1211 15:34:34.725709 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.727811 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-rj28v" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.736854 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7j5zh"] Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.757848 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.800794 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl64x\" (UniqueName: \"kubernetes.io/projected/8badb2a5-9456-4325-89d3-68f8db885c95-kube-api-access-pl64x\") pod \"perses-operator-5446b9c989-7j5zh\" (UID: \"8badb2a5-9456-4325-89d3-68f8db885c95\") " pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.800886 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzdwz\" (UniqueName: \"kubernetes.io/projected/28e7bf38-168c-4a52-8e29-5036b9adc3ab-kube-api-access-jzdwz\") pod \"observability-operator-d8bb48f5d-c4b5b\" (UID: \"28e7bf38-168c-4a52-8e29-5036b9adc3ab\") " pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.800913 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/8badb2a5-9456-4325-89d3-68f8db885c95-openshift-service-ca\") pod \"perses-operator-5446b9c989-7j5zh\" (UID: \"8badb2a5-9456-4325-89d3-68f8db885c95\") " pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.800934 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/28e7bf38-168c-4a52-8e29-5036b9adc3ab-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-c4b5b\" (UID: \"28e7bf38-168c-4a52-8e29-5036b9adc3ab\") " pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.807723 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/28e7bf38-168c-4a52-8e29-5036b9adc3ab-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-c4b5b\" (UID: \"28e7bf38-168c-4a52-8e29-5036b9adc3ab\") " pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.817837 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.834547 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzdwz\" (UniqueName: \"kubernetes.io/projected/28e7bf38-168c-4a52-8e29-5036b9adc3ab-kube-api-access-jzdwz\") pod \"observability-operator-d8bb48f5d-c4b5b\" (UID: \"28e7bf38-168c-4a52-8e29-5036b9adc3ab\") " pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.904651 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/8badb2a5-9456-4325-89d3-68f8db885c95-openshift-service-ca\") pod \"perses-operator-5446b9c989-7j5zh\" (UID: \"8badb2a5-9456-4325-89d3-68f8db885c95\") " pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.904734 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl64x\" (UniqueName: \"kubernetes.io/projected/8badb2a5-9456-4325-89d3-68f8db885c95-kube-api-access-pl64x\") pod \"perses-operator-5446b9c989-7j5zh\" (UID: \"8badb2a5-9456-4325-89d3-68f8db885c95\") " pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.905909 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/8badb2a5-9456-4325-89d3-68f8db885c95-openshift-service-ca\") pod \"perses-operator-5446b9c989-7j5zh\" (UID: \"8badb2a5-9456-4325-89d3-68f8db885c95\") " pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.929845 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl64x\" (UniqueName: \"kubernetes.io/projected/8badb2a5-9456-4325-89d3-68f8db885c95-kube-api-access-pl64x\") pod \"perses-operator-5446b9c989-7j5zh\" (UID: \"8badb2a5-9456-4325-89d3-68f8db885c95\") " pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:34 crc kubenswrapper[4723]: I1211 15:34:34.968491 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.025298 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg"] Dec 11 15:34:35 crc kubenswrapper[4723]: W1211 15:34:35.036731 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d747017_ba68_4bba_932d_30c1b7f21c3e.slice/crio-21437d5bc5f2defac436ac15168747ae1e81c4cac6446212cfb5f369023f6a59 WatchSource:0}: Error finding container 21437d5bc5f2defac436ac15168747ae1e81c4cac6446212cfb5f369023f6a59: Status 404 returned error can't find the container with id 21437d5bc5f2defac436ac15168747ae1e81c4cac6446212cfb5f369023f6a59 Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.059183 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.178317 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl"] Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.248826 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2"] Dec 11 15:34:35 crc kubenswrapper[4723]: W1211 15:34:35.251089 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod248a8e31_62c3_4bc6_81d7_8d603174184f.slice/crio-a51a7f2a9d2c6229ac7e7663383403645cdc74b55a9fb2c7041040f8827c4bb1 WatchSource:0}: Error finding container a51a7f2a9d2c6229ac7e7663383403645cdc74b55a9fb2c7041040f8827c4bb1: Status 404 returned error can't find the container with id a51a7f2a9d2c6229ac7e7663383403645cdc74b55a9fb2c7041040f8827c4bb1 Dec 11 15:34:35 crc kubenswrapper[4723]: W1211 15:34:35.256432 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dbda7da_f5c3_43c2_92a7_397c48293f0b.slice/crio-be72f02fa00e76f51b7e147252538f8a9dbd1a3cbf9c073b6c22b47fed870976 WatchSource:0}: Error finding container be72f02fa00e76f51b7e147252538f8a9dbd1a3cbf9c073b6c22b47fed870976: Status 404 returned error can't find the container with id be72f02fa00e76f51b7e147252538f8a9dbd1a3cbf9c073b6c22b47fed870976 Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.530997 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" event={"ID":"248a8e31-62c3-4bc6-81d7-8d603174184f","Type":"ContainerStarted","Data":"a51a7f2a9d2c6229ac7e7663383403645cdc74b55a9fb2c7041040f8827c4bb1"} Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.532651 4723 generic.go:334] "Generic (PLEG): container finished" podID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerID="519ff83e025fa30e568aac037f47890cbf7cf50cddb103aa6880032327ecb338" exitCode=0 Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.532714 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x7fch" event={"ID":"07eead0e-ad88-4884-ad27-2ace3526fc3e","Type":"ContainerDied","Data":"519ff83e025fa30e568aac037f47890cbf7cf50cddb103aa6880032327ecb338"} Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.534349 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" event={"ID":"2dbda7da-f5c3-43c2-92a7-397c48293f0b","Type":"ContainerStarted","Data":"be72f02fa00e76f51b7e147252538f8a9dbd1a3cbf9c073b6c22b47fed870976"} Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.535682 4723 generic.go:334] "Generic (PLEG): container finished" podID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerID="04339d6ed8cdb9d822ec04aac2a664b3f3833f81013b26ddd5898ff94a818005" exitCode=0 Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.535750 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" event={"ID":"c62e47fd-6638-4d88-a884-a0b2bef6b59f","Type":"ContainerDied","Data":"04339d6ed8cdb9d822ec04aac2a664b3f3833f81013b26ddd5898ff94a818005"} Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.537190 4723 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" event={"ID":"8d747017-ba68-4bba-932d-30c1b7f21c3e","Type":"ContainerStarted","Data":"21437d5bc5f2defac436ac15168747ae1e81c4cac6446212cfb5f369023f6a59"} Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.593707 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-7j5zh"] Dec 11 15:34:35 crc kubenswrapper[4723]: W1211 15:34:35.600443 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8badb2a5_9456_4325_89d3_68f8db885c95.slice/crio-266db6cb3b8de8a7355f5c2d6565309013d630baddb8a579f8aef8dd58a79037 WatchSource:0}: Error finding container 266db6cb3b8de8a7355f5c2d6565309013d630baddb8a579f8aef8dd58a79037: Status 404 returned error can't find the container with id 266db6cb3b8de8a7355f5c2d6565309013d630baddb8a579f8aef8dd58a79037 Dec 11 15:34:35 crc kubenswrapper[4723]: I1211 15:34:35.619353 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-c4b5b"] Dec 11 15:34:35 crc kubenswrapper[4723]: W1211 15:34:35.624411 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28e7bf38_168c_4a52_8e29_5036b9adc3ab.slice/crio-d0c316b00a5187012fdf0295beb2e919f16a6779d143b306eaffdf4658202657 WatchSource:0}: Error finding container d0c316b00a5187012fdf0295beb2e919f16a6779d143b306eaffdf4658202657: Status 404 returned error can't find the container with id d0c316b00a5187012fdf0295beb2e919f16a6779d143b306eaffdf4658202657 Dec 11 15:34:36 crc kubenswrapper[4723]: I1211 15:34:36.544398 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" event={"ID":"8badb2a5-9456-4325-89d3-68f8db885c95","Type":"ContainerStarted","Data":"266db6cb3b8de8a7355f5c2d6565309013d630baddb8a579f8aef8dd58a79037"} Dec 11 15:34:36 crc kubenswrapper[4723]: I1211 15:34:36.545993 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" event={"ID":"28e7bf38-168c-4a52-8e29-5036b9adc3ab","Type":"ContainerStarted","Data":"d0c316b00a5187012fdf0295beb2e919f16a6779d143b306eaffdf4658202657"} Dec 11 15:34:36 crc kubenswrapper[4723]: I1211 15:34:36.547954 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x7fch" event={"ID":"07eead0e-ad88-4884-ad27-2ace3526fc3e","Type":"ContainerStarted","Data":"49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00"} Dec 11 15:34:39 crc kubenswrapper[4723]: I1211 15:34:39.361080 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:39 crc kubenswrapper[4723]: I1211 15:34:39.362166 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:39 crc kubenswrapper[4723]: I1211 15:34:39.503778 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:39 crc kubenswrapper[4723]: I1211 15:34:39.788198 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x7fch" podStartSLOduration=6.0371547230000004 podStartE2EDuration="11.78817948s" podCreationTimestamp="2025-12-11 
15:34:28 +0000 UTC" firstStartedPulling="2025-12-11 15:34:30.305239772 +0000 UTC m=+681.079473207" lastFinishedPulling="2025-12-11 15:34:36.056264529 +0000 UTC m=+686.830497964" observedRunningTime="2025-12-11 15:34:36.576925266 +0000 UTC m=+687.351158701" watchObservedRunningTime="2025-12-11 15:34:39.78817948 +0000 UTC m=+690.562412915" Dec 11 15:34:40 crc kubenswrapper[4723]: I1211 15:34:40.916980 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:41 crc kubenswrapper[4723]: I1211 15:34:41.018495 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.871260 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-6f4b568f8c-ddzz2"] Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.872119 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.877504 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.877754 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-824mm" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.877927 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.886820 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.919146 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-6f4b568f8c-ddzz2"] Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.945910 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a6895274-21c7-41d3-8e64-38aab2435100-webhook-cert\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.946032 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t44q\" (UniqueName: \"kubernetes.io/projected/a6895274-21c7-41d3-8e64-38aab2435100-kube-api-access-7t44q\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:42 crc kubenswrapper[4723]: I1211 15:34:42.946061 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a6895274-21c7-41d3-8e64-38aab2435100-apiservice-cert\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.057791 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t44q\" (UniqueName: \"kubernetes.io/projected/a6895274-21c7-41d3-8e64-38aab2435100-kube-api-access-7t44q\") pod 
\"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.057842 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a6895274-21c7-41d3-8e64-38aab2435100-apiservice-cert\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.057868 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a6895274-21c7-41d3-8e64-38aab2435100-webhook-cert\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.097017 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t44q\" (UniqueName: \"kubernetes.io/projected/a6895274-21c7-41d3-8e64-38aab2435100-kube-api-access-7t44q\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.181779 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a6895274-21c7-41d3-8e64-38aab2435100-webhook-cert\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.200166 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a6895274-21c7-41d3-8e64-38aab2435100-apiservice-cert\") pod \"elastic-operator-6f4b568f8c-ddzz2\" (UID: \"a6895274-21c7-41d3-8e64-38aab2435100\") " pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.218859 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.790303 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x7d5c"] Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.790957 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x7d5c" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="registry-server" containerID="cri-o://d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f" gracePeriod=2 Dec 11 15:34:43 crc kubenswrapper[4723]: I1211 15:34:43.797207 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-6f4b568f8c-ddzz2"] Dec 11 15:34:43 crc kubenswrapper[4723]: W1211 15:34:43.814828 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6895274_21c7_41d3_8e64_38aab2435100.slice/crio-737e5bf700038ce5bbef7112f70eb64fbcef3c4bc8f455f406978dc153ad67f0 WatchSource:0}: Error finding container 737e5bf700038ce5bbef7112f70eb64fbcef3c4bc8f455f406978dc153ad67f0: Status 404 returned error can't find the container with id 737e5bf700038ce5bbef7112f70eb64fbcef3c4bc8f455f406978dc153ad67f0 Dec 11 15:34:44 crc kubenswrapper[4723]: I1211 15:34:44.774881 4723 generic.go:334] "Generic (PLEG): container finished" podID="23ba2517-d363-417e-8244-ae29e5da10ce" containerID="d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f" exitCode=0 Dec 11 15:34:44 crc kubenswrapper[4723]: I1211 15:34:44.774984 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7d5c" event={"ID":"23ba2517-d363-417e-8244-ae29e5da10ce","Type":"ContainerDied","Data":"d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f"} Dec 11 15:34:44 crc kubenswrapper[4723]: I1211 15:34:44.789999 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" event={"ID":"a6895274-21c7-41d3-8e64-38aab2435100","Type":"ContainerStarted","Data":"737e5bf700038ce5bbef7112f70eb64fbcef3c4bc8f455f406978dc153ad67f0"} Dec 11 15:34:49 crc kubenswrapper[4723]: I1211 15:34:49.467285 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:34:50 crc kubenswrapper[4723]: E1211 15:34:50.752758 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f is running failed: container process not found" containerID="d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:34:50 crc kubenswrapper[4723]: E1211 15:34:50.754451 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f is running failed: container process not found" containerID="d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:34:50 crc kubenswrapper[4723]: E1211 15:34:50.755264 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f is running failed: container process not found" containerID="d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:34:50 crc kubenswrapper[4723]: E1211 15:34:50.755294 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-x7d5c" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="registry-server" Dec 11 15:34:52 crc kubenswrapper[4723]: I1211 15:34:52.603357 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x7fch"] Dec 11 15:34:52 crc kubenswrapper[4723]: I1211 15:34:52.603612 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x7fch" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="registry-server" containerID="cri-o://49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00" gracePeriod=2 Dec 11 15:34:52 crc kubenswrapper[4723]: I1211 15:34:52.836574 4723 generic.go:334] "Generic (PLEG): container finished" podID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerID="49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00" exitCode=0 Dec 11 15:34:52 crc kubenswrapper[4723]: I1211 15:34:52.836879 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x7fch" event={"ID":"07eead0e-ad88-4884-ad27-2ace3526fc3e","Type":"ContainerDied","Data":"49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00"} Dec 11 15:34:57 crc kubenswrapper[4723]: E1211 15:34:57.618161 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3" Dec 11 15:34:57 crc kubenswrapper[4723]: E1211 15:34:57.619037 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} 
{} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pd6z2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-vqsrg_openshift-operators(8d747017-ba68-4bba-932d-30c1b7f21c3e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:34:57 crc kubenswrapper[4723]: E1211 15:34:57.620777 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" podUID="8d747017-ba68-4bba-932d-30c1b7f21c3e" Dec 11 15:34:57 crc kubenswrapper[4723]: E1211 15:34:57.869366 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" podUID="8d747017-ba68-4bba-932d-30c1b7f21c3e" Dec 11 15:34:57 crc kubenswrapper[4723]: E1211 15:34:57.911253 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="registry.redhat.io/cert-manager/cert-manager-operator-bundle@sha256:acaaea813059d4ac5b2618395bd9113f72ada0a33aaaba91aa94f000e77df407" Dec 11 15:34:57 crc kubenswrapper[4723]: E1211 15:34:57.911430 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:pull,Image:registry.redhat.io/cert-manager/cert-manager-operator-bundle@sha256:acaaea813059d4ac5b2618395bd9113f72ada0a33aaaba91aa94f000e77df407,Command:[/util/cpb /bundle],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:bundle,ReadOnly:false,MountPath:/bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:util,ReadOnly:false,MountPath:/util,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ltmsk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_openshift-marketplace(c62e47fd-6638-4d88-a884-a0b2bef6b59f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 15:34:57 crc kubenswrapper[4723]: E1211 15:34:57.912805 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pull\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" Dec 11 15:34:57 crc kubenswrapper[4723]: I1211 15:34:57.940561 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:57 crc kubenswrapper[4723]: I1211 15:34:57.984449 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqncz\" (UniqueName: \"kubernetes.io/projected/23ba2517-d363-417e-8244-ae29e5da10ce-kube-api-access-sqncz\") pod \"23ba2517-d363-417e-8244-ae29e5da10ce\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " Dec 11 15:34:57 crc kubenswrapper[4723]: I1211 15:34:57.984518 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-utilities\") pod \"23ba2517-d363-417e-8244-ae29e5da10ce\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " Dec 11 15:34:57 crc kubenswrapper[4723]: I1211 15:34:57.984541 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-catalog-content\") pod \"23ba2517-d363-417e-8244-ae29e5da10ce\" (UID: \"23ba2517-d363-417e-8244-ae29e5da10ce\") " Dec 11 15:34:57 crc kubenswrapper[4723]: I1211 15:34:57.986389 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-utilities" (OuterVolumeSpecName: "utilities") pod "23ba2517-d363-417e-8244-ae29e5da10ce" (UID: "23ba2517-d363-417e-8244-ae29e5da10ce"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:57 crc kubenswrapper[4723]: I1211 15:34:57.994272 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23ba2517-d363-417e-8244-ae29e5da10ce-kube-api-access-sqncz" (OuterVolumeSpecName: "kube-api-access-sqncz") pod "23ba2517-d363-417e-8244-ae29e5da10ce" (UID: "23ba2517-d363-417e-8244-ae29e5da10ce"). InnerVolumeSpecName "kube-api-access-sqncz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.102514 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23ba2517-d363-417e-8244-ae29e5da10ce" (UID: "23ba2517-d363-417e-8244-ae29e5da10ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.108000 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqncz\" (UniqueName: \"kubernetes.io/projected/23ba2517-d363-417e-8244-ae29e5da10ce-kube-api-access-sqncz\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.108088 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.209615 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ba2517-d363-417e-8244-ae29e5da10ce-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:34:58 crc kubenswrapper[4723]: E1211 15:34:58.750537 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 11 15:34:58 crc kubenswrapper[4723]: E1211 15:34:58.750955 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl_openshift-operators(2dbda7da-f5c3-43c2-92a7-397c48293f0b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:34:58 crc kubenswrapper[4723]: E1211 15:34:58.769289 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" podUID="2dbda7da-f5c3-43c2-92a7-397c48293f0b" Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.875510 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x7d5c" Dec 11 15:34:58 crc kubenswrapper[4723]: E1211 15:34:58.880322 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" podUID="2dbda7da-f5c3-43c2-92a7-397c48293f0b" Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.880335 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x7d5c" event={"ID":"23ba2517-d363-417e-8244-ae29e5da10ce","Type":"ContainerDied","Data":"30d5ed6313641efa4443668ba11830f5e5649b8d69493e66d5fb809f59a3704b"} Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.880413 4723 scope.go:117] "RemoveContainer" containerID="d85a12d53d4ecad0b9c6cd01fcc0a9d2f1813f2b10ea6e7551f636ed7496152f" Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.912192 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x7d5c"] Dec 11 15:34:58 crc kubenswrapper[4723]: I1211 15:34:58.916944 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x7d5c"] Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.361374 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00 is running failed: container process not found" containerID="49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.362085 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00 is running failed: container process not found" containerID="49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.362371 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00 is running failed: container process not found" containerID="49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.362404 4723 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-x7fch" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="registry-server" Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.369231 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385" Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.369410 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pl64x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5446b9c989-7j5zh_openshift-operators(8badb2a5-9456-4325-89d3-68f8db885c95): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.370631 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" podUID="8badb2a5-9456-4325-89d3-68f8db885c95" Dec 11 15:34:59 crc kubenswrapper[4723]: I1211 15:34:59.573633 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" path="/var/lib/kubelet/pods/23ba2517-d363-417e-8244-ae29e5da10ce/volumes" Dec 11 15:34:59 crc kubenswrapper[4723]: E1211 15:34:59.881898 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: 
\"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:9aec4c328ec43e40481e06ca5808deead74b75c0aacb90e9e72966c3fa14f385\\\"\"" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" podUID="8badb2a5-9456-4325-89d3-68f8db885c95" Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.588105 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec" Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.588365 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-dd5f85999-sskx2_openshift-operators(248a8e31-62c3-4bc6-81d7-8d603174184f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.590183 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" podUID="248a8e31-62c3-4bc6-81d7-8d603174184f" Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.594697 4723 log.go:32] "PullImage from image service failed" err="rpc error: 
code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.594919 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) --openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9
d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jzdwz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-c4b5b_openshift-operators(28e7bf38-168c-4a52-8e29-5036b9adc3ab): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.596586 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" podUID="28e7bf38-168c-4a52-8e29-5036b9adc3ab" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.618462 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.660374 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g98h7\" (UniqueName: \"kubernetes.io/projected/07eead0e-ad88-4884-ad27-2ace3526fc3e-kube-api-access-g98h7\") pod \"07eead0e-ad88-4884-ad27-2ace3526fc3e\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.660445 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-utilities\") pod \"07eead0e-ad88-4884-ad27-2ace3526fc3e\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.660478 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-catalog-content\") pod \"07eead0e-ad88-4884-ad27-2ace3526fc3e\" (UID: \"07eead0e-ad88-4884-ad27-2ace3526fc3e\") " Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.661842 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-utilities" (OuterVolumeSpecName: "utilities") pod "07eead0e-ad88-4884-ad27-2ace3526fc3e" (UID: "07eead0e-ad88-4884-ad27-2ace3526fc3e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.666596 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07eead0e-ad88-4884-ad27-2ace3526fc3e-kube-api-access-g98h7" (OuterVolumeSpecName: "kube-api-access-g98h7") pod "07eead0e-ad88-4884-ad27-2ace3526fc3e" (UID: "07eead0e-ad88-4884-ad27-2ace3526fc3e"). 
InnerVolumeSpecName "kube-api-access-g98h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.718247 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07eead0e-ad88-4884-ad27-2ace3526fc3e" (UID: "07eead0e-ad88-4884-ad27-2ace3526fc3e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.761277 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g98h7\" (UniqueName: \"kubernetes.io/projected/07eead0e-ad88-4884-ad27-2ace3526fc3e-kube-api-access-g98h7\") on node \"crc\" DevicePath \"\"" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.761305 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.761317 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07eead0e-ad88-4884-ad27-2ace3526fc3e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.893735 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x7fch" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.901233 4723 scope.go:117] "RemoveContainer" containerID="efa87e30296c4b20212ecd38c68151ecb88d46e420acd523f0c92b76a036a1ed" Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.901276 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" podUID="248a8e31-62c3-4bc6-81d7-8d603174184f" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.901506 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x7fch" event={"ID":"07eead0e-ad88-4884-ad27-2ace3526fc3e","Type":"ContainerDied","Data":"1cd1b048a98a2c86f44ac423dc7c1e64cd683b1aa642c0c698de2c6f38f06dd5"} Dec 11 15:35:01 crc kubenswrapper[4723]: E1211 15:35:01.903065 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" podUID="28e7bf38-168c-4a52-8e29-5036b9adc3ab" Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.955830 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x7fch"] Dec 11 15:35:01 crc kubenswrapper[4723]: I1211 15:35:01.959373 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x7fch"] Dec 11 15:35:02 crc kubenswrapper[4723]: I1211 15:35:02.654920 4723 scope.go:117] 
"RemoveContainer" containerID="ccc846b7a7a6a1d2f3f4d2dff9b5fcbfdc4e3cc3be1ecb1bc02c7b9ac15946ef" Dec 11 15:35:02 crc kubenswrapper[4723]: I1211 15:35:02.673432 4723 scope.go:117] "RemoveContainer" containerID="49ac4ef228020df6e0cb001a9efb34caccebb65e017f896ac7e6750d4ba9fc00" Dec 11 15:35:02 crc kubenswrapper[4723]: I1211 15:35:02.708729 4723 scope.go:117] "RemoveContainer" containerID="519ff83e025fa30e568aac037f47890cbf7cf50cddb103aa6880032327ecb338" Dec 11 15:35:02 crc kubenswrapper[4723]: I1211 15:35:02.724087 4723 scope.go:117] "RemoveContainer" containerID="52c36f8c849ca4677393fe82b8fc73a38a6fb224393dbae1ed5e12dd2ac94194" Dec 11 15:35:02 crc kubenswrapper[4723]: I1211 15:35:02.897124 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" event={"ID":"a6895274-21c7-41d3-8e64-38aab2435100","Type":"ContainerStarted","Data":"38823a4031afc1ccfb74dd644bffb62b47c88b13a7b57d9fb6456aeeded5d05c"} Dec 11 15:35:02 crc kubenswrapper[4723]: I1211 15:35:02.918735 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-6f4b568f8c-ddzz2" podStartSLOduration=2.014028516 podStartE2EDuration="20.918710287s" podCreationTimestamp="2025-12-11 15:34:42 +0000 UTC" firstStartedPulling="2025-12-11 15:34:43.819841055 +0000 UTC m=+694.594074490" lastFinishedPulling="2025-12-11 15:35:02.724522816 +0000 UTC m=+713.498756261" observedRunningTime="2025-12-11 15:35:02.914414973 +0000 UTC m=+713.688648418" watchObservedRunningTime="2025-12-11 15:35:02.918710287 +0000 UTC m=+713.692943722" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.554883 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" path="/var/lib/kubelet/pods/07eead0e-ad88-4884-ad27-2ace3526fc3e/volumes" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.927882 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 11 15:35:03 crc kubenswrapper[4723]: E1211 15:35:03.928185 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="extract-content" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928199 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="extract-content" Dec 11 15:35:03 crc kubenswrapper[4723]: E1211 15:35:03.928210 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="extract-content" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928216 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="extract-content" Dec 11 15:35:03 crc kubenswrapper[4723]: E1211 15:35:03.928225 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="registry-server" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928231 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="registry-server" Dec 11 15:35:03 crc kubenswrapper[4723]: E1211 15:35:03.928242 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="registry-server" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928247 4723 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="registry-server" Dec 11 15:35:03 crc kubenswrapper[4723]: E1211 15:35:03.928257 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="extract-utilities" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928263 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="extract-utilities" Dec 11 15:35:03 crc kubenswrapper[4723]: E1211 15:35:03.928273 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="extract-utilities" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928278 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="extract-utilities" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928378 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="23ba2517-d363-417e-8244-ae29e5da10ce" containerName="registry-server" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.928391 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="07eead0e-ad88-4884-ad27-2ace3526fc3e" containerName="registry-server" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.929205 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.932276 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.932276 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-zx8hn" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.932780 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.933131 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.933375 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.933656 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.933808 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.935324 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.937306 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.951148 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989384 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: 
\"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989436 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989477 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989508 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989649 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989838 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989924 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.989953 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.990043 4723 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.990128 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.990171 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.990206 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/e6c2c683-4358-47d5-b5cd-e97c588b965e-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.990295 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.990416 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:03 crc kubenswrapper[4723]: I1211 15:35:03.990543 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091258 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091305 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" 
(UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091326 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091354 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091378 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091396 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091413 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091433 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091447 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091466 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: 
\"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091495 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091513 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091533 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/e6c2c683-4358-47d5-b5cd-e97c588b965e-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091557 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091574 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.091831 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.092028 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.092405 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 
crc kubenswrapper[4723]: I1211 15:35:04.092509 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.092573 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.092762 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.092846 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.093487 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.096160 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.096311 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.096362 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.096381 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: 
\"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.096761 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.098518 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/e6c2c683-4358-47d5-b5cd-e97c588b965e-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.104062 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/e6c2c683-4358-47d5-b5cd-e97c588b965e-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"e6c2c683-4358-47d5-b5cd-e97c588b965e\") " pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.246461 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.439895 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 11 15:35:04 crc kubenswrapper[4723]: I1211 15:35:04.910173 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"e6c2c683-4358-47d5-b5cd-e97c588b965e","Type":"ContainerStarted","Data":"d2f2d1978e7bc96cc427adeb97187c91e2b5aeff55368442104681958f4d6341"} Dec 11 15:35:12 crc kubenswrapper[4723]: I1211 15:35:12.961449 4723 generic.go:334] "Generic (PLEG): container finished" podID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerID="f19db89bd487758cacb4db04058b11a0960a3c675368e9f1ae1f788a1369652e" exitCode=0 Dec 11 15:35:12 crc kubenswrapper[4723]: I1211 15:35:12.961535 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" event={"ID":"c62e47fd-6638-4d88-a884-a0b2bef6b59f","Type":"ContainerDied","Data":"f19db89bd487758cacb4db04058b11a0960a3c675368e9f1ae1f788a1369652e"} Dec 11 15:35:28 crc kubenswrapper[4723]: E1211 15:35:28.145103 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="registry.connect.redhat.com/elastic/elasticsearch:7.17.20" Dec 11 15:35:28 crc kubenswrapper[4723]: E1211 15:35:28.146501 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:elastic-internal-init-filesystem,Image:registry.connect.redhat.com/elastic/elasticsearch:7.17.20,Command:[bash -c 
/mnt/elastic-internal/scripts/prepare-fs.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:HEADLESS_SERVICE_NAME,Value:elasticsearch-es-default,ValueFrom:nil,},EnvVar{Name:PROBE_PASSWORD_PATH,Value:/mnt/elastic-internal/pod-mounted-users/elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:PROBE_USERNAME,Value:elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:READINESS_PROBE_PROTOCOL,Value:https,ValueFrom:nil,},EnvVar{Name:NSS_SDB_USE_CACHE,Value:no,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:downward-api,ReadOnly:true,MountPath:/mnt/elastic-internal/downward-api,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-bin-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-bin-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config,ReadOnly:true,MountPath:/mnt/elastic-internal/elasticsearch-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-config-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-plugins-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-plugins-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-http-certificates,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/http-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-probe-user,ReadOnly:true,MountPath:/mnt/elastic-internal/pod-mounted-users,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-remote-certificate-authorities,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/transport-remote-certs/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-scripts,ReadOnly:true,MountPath:/mnt/elastic-internal/scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-transport-certificates,ReadOnly:true,MountPath:/mnt/elastic-internal/transport-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-unicast-hosts,ReadOnly:true,MountPath:/mnt/elastic-internal/unicast-hosts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadO
nly:nil,},VolumeMount{Name:elastic-internal-xpack-file-realm,ReadOnly:true,MountPath:/mnt/elastic-internal/xpack-file-realm,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-data,ReadOnly:false,MountPath:/usr/share/elasticsearch/data,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-logs,ReadOnly:false,MountPath:/usr/share/elasticsearch/logs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tmp-volume,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod elasticsearch-es-default-0_service-telemetry(e6c2c683-4358-47d5-b5cd-e97c588b965e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 15:35:28 crc kubenswrapper[4723]: E1211 15:35:28.147797 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="e6c2c683-4358-47d5-b5cd-e97c588b965e" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.057528 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" event={"ID":"2dbda7da-f5c3-43c2-92a7-397c48293f0b","Type":"ContainerStarted","Data":"88c056fa6a2cdab177ad3b70a656504964df09b7d125fb4679954eae5ef38059"} Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.061673 4723 generic.go:334] "Generic (PLEG): container finished" podID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerID="1b201dea3fe3a2f79c2f5679bad41785cf89d758a7e085fe52a32301b5636325" exitCode=0 Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.061719 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" event={"ID":"c62e47fd-6638-4d88-a884-a0b2bef6b59f","Type":"ContainerDied","Data":"1b201dea3fe3a2f79c2f5679bad41785cf89d758a7e085fe52a32301b5636325"} Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.063712 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" event={"ID":"8badb2a5-9456-4325-89d3-68f8db885c95","Type":"ContainerStarted","Data":"8260e6685c4c085f95ad27bed791cd78a17e23779ed62bb089092c7b5876689c"} Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.063900 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.065547 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" 
event={"ID":"8d747017-ba68-4bba-932d-30c1b7f21c3e","Type":"ContainerStarted","Data":"c7c4b2748c0881fc3af3292be1b0d01b92778b6943bf724f792b986229e2f1c7"} Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.067538 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" event={"ID":"248a8e31-62c3-4bc6-81d7-8d603174184f","Type":"ContainerStarted","Data":"8f1e7c03244dcbe0a6b15f7d52b15537640b787cef40321b73e9dfd1732fc7dc"} Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.070292 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" event={"ID":"28e7bf38-168c-4a52-8e29-5036b9adc3ab","Type":"ContainerStarted","Data":"e1cdc10bb3c91ea8be48874e085d33eb385d64acb45ef90800ad5af3c105c871"} Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.072847 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:35:29 crc kubenswrapper[4723]: E1211 15:35:29.072961 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="e6c2c683-4358-47d5-b5cd-e97c588b965e" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.085011 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.102579 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl" podStartSLOduration=1.839825694 podStartE2EDuration="55.102560838s" podCreationTimestamp="2025-12-11 15:34:34 +0000 UTC" firstStartedPulling="2025-12-11 15:34:35.258195115 +0000 UTC m=+686.032428550" lastFinishedPulling="2025-12-11 15:35:28.520930259 +0000 UTC m=+739.295163694" observedRunningTime="2025-12-11 15:35:29.100083201 +0000 UTC m=+739.874316636" watchObservedRunningTime="2025-12-11 15:35:29.102560838 +0000 UTC m=+739.876794273" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.133293 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-c4b5b" podStartSLOduration=2.221401613 podStartE2EDuration="55.133276759s" podCreationTimestamp="2025-12-11 15:34:34 +0000 UTC" firstStartedPulling="2025-12-11 15:34:35.627327892 +0000 UTC m=+686.401561327" lastFinishedPulling="2025-12-11 15:35:28.539203038 +0000 UTC m=+739.313436473" observedRunningTime="2025-12-11 15:35:29.127605697 +0000 UTC m=+739.901839132" watchObservedRunningTime="2025-12-11 15:35:29.133276759 +0000 UTC m=+739.907510194" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.173133 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" podStartSLOduration=2.254681314 podStartE2EDuration="55.173113374s" podCreationTimestamp="2025-12-11 15:34:34 +0000 UTC" firstStartedPulling="2025-12-11 15:34:35.602687014 +0000 UTC m=+686.376920449" lastFinishedPulling="2025-12-11 15:35:28.521119074 +0000 UTC m=+739.295352509" observedRunningTime="2025-12-11 15:35:29.167335019 +0000 UTC m=+739.941568454" watchObservedRunningTime="2025-12-11 
15:35:29.173113374 +0000 UTC m=+739.947346809" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.252668 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-vqsrg" podStartSLOduration=1.771481846 podStartE2EDuration="55.252621449s" podCreationTimestamp="2025-12-11 15:34:34 +0000 UTC" firstStartedPulling="2025-12-11 15:34:35.038911563 +0000 UTC m=+685.813144998" lastFinishedPulling="2025-12-11 15:35:28.520051166 +0000 UTC m=+739.294284601" observedRunningTime="2025-12-11 15:35:29.216901714 +0000 UTC m=+739.991135169" watchObservedRunningTime="2025-12-11 15:35:29.252621449 +0000 UTC m=+740.026854884" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.258314 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-dd5f85999-sskx2" podStartSLOduration=1.97177403 podStartE2EDuration="55.258297751s" podCreationTimestamp="2025-12-11 15:34:34 +0000 UTC" firstStartedPulling="2025-12-11 15:34:35.25354776 +0000 UTC m=+686.027781195" lastFinishedPulling="2025-12-11 15:35:28.540071481 +0000 UTC m=+739.314304916" observedRunningTime="2025-12-11 15:35:29.254438348 +0000 UTC m=+740.028671793" watchObservedRunningTime="2025-12-11 15:35:29.258297751 +0000 UTC m=+740.032531186" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.451163 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.493606 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.993668 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-h6c9k"] Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.994432 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-h6c9k" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.996416 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"infrawatch-operators-dockercfg-qr2j2" Dec 11 15:35:29 crc kubenswrapper[4723]: I1211 15:35:29.999109 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-h6c9k"] Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.072396 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9gg8\" (UniqueName: \"kubernetes.io/projected/b5e28688-3dda-482d-87b3-76899d96e575-kube-api-access-f9gg8\") pod \"infrawatch-operators-h6c9k\" (UID: \"b5e28688-3dda-482d-87b3-76899d96e575\") " pod="service-telemetry/infrawatch-operators-h6c9k" Dec 11 15:35:30 crc kubenswrapper[4723]: E1211 15:35:30.076259 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="e6c2c683-4358-47d5-b5cd-e97c588b965e" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.174080 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9gg8\" (UniqueName: \"kubernetes.io/projected/b5e28688-3dda-482d-87b3-76899d96e575-kube-api-access-f9gg8\") pod \"infrawatch-operators-h6c9k\" (UID: \"b5e28688-3dda-482d-87b3-76899d96e575\") " pod="service-telemetry/infrawatch-operators-h6c9k" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.207569 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9gg8\" (UniqueName: \"kubernetes.io/projected/b5e28688-3dda-482d-87b3-76899d96e575-kube-api-access-f9gg8\") pod \"infrawatch-operators-h6c9k\" (UID: \"b5e28688-3dda-482d-87b3-76899d96e575\") " pod="service-telemetry/infrawatch-operators-h6c9k" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.313142 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-h6c9k" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.492944 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.582058 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-util\") pod \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.582106 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltmsk\" (UniqueName: \"kubernetes.io/projected/c62e47fd-6638-4d88-a884-a0b2bef6b59f-kube-api-access-ltmsk\") pod \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.588047 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c62e47fd-6638-4d88-a884-a0b2bef6b59f-kube-api-access-ltmsk" (OuterVolumeSpecName: "kube-api-access-ltmsk") pod "c62e47fd-6638-4d88-a884-a0b2bef6b59f" (UID: "c62e47fd-6638-4d88-a884-a0b2bef6b59f"). InnerVolumeSpecName "kube-api-access-ltmsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.588193 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-bundle\") pod \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\" (UID: \"c62e47fd-6638-4d88-a884-a0b2bef6b59f\") " Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.588875 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltmsk\" (UniqueName: \"kubernetes.io/projected/c62e47fd-6638-4d88-a884-a0b2bef6b59f-kube-api-access-ltmsk\") on node \"crc\" DevicePath \"\"" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.589891 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-bundle" (OuterVolumeSpecName: "bundle") pod "c62e47fd-6638-4d88-a884-a0b2bef6b59f" (UID: "c62e47fd-6638-4d88-a884-a0b2bef6b59f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.600116 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-util" (OuterVolumeSpecName: "util") pod "c62e47fd-6638-4d88-a884-a0b2bef6b59f" (UID: "c62e47fd-6638-4d88-a884-a0b2bef6b59f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.689929 4723 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-util\") on node \"crc\" DevicePath \"\"" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.689983 4723 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c62e47fd-6638-4d88-a884-a0b2bef6b59f-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:35:30 crc kubenswrapper[4723]: I1211 15:35:30.760637 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-h6c9k"] Dec 11 15:35:30 crc kubenswrapper[4723]: W1211 15:35:30.766643 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5e28688_3dda_482d_87b3_76899d96e575.slice/crio-3e43b1882bfdaca597d5f143889be9d5f7a6a7d1710c6f3b6c4c61ef01c662b3 WatchSource:0}: Error finding container 3e43b1882bfdaca597d5f143889be9d5f7a6a7d1710c6f3b6c4c61ef01c662b3: Status 404 returned error can't find the container with id 3e43b1882bfdaca597d5f143889be9d5f7a6a7d1710c6f3b6c4c61ef01c662b3 Dec 11 15:35:31 crc kubenswrapper[4723]: I1211 15:35:31.081403 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" event={"ID":"c62e47fd-6638-4d88-a884-a0b2bef6b59f","Type":"ContainerDied","Data":"0ee68e0513190e44209ad56b5925019bd8165240c0b4b8b4c8e682e4bf10418c"} Dec 11 15:35:31 crc kubenswrapper[4723]: I1211 15:35:31.081446 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ee68e0513190e44209ad56b5925019bd8165240c0b4b8b4c8e682e4bf10418c" Dec 11 15:35:31 crc kubenswrapper[4723]: I1211 15:35:31.081452 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z" Dec 11 15:35:31 crc kubenswrapper[4723]: I1211 15:35:31.082274 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-h6c9k" event={"ID":"b5e28688-3dda-482d-87b3-76899d96e575","Type":"ContainerStarted","Data":"3e43b1882bfdaca597d5f143889be9d5f7a6a7d1710c6f3b6c4c61ef01c662b3"} Dec 11 15:35:31 crc kubenswrapper[4723]: E1211 15:35:31.083709 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="e6c2c683-4358-47d5-b5cd-e97c588b965e" Dec 11 15:35:33 crc kubenswrapper[4723]: I1211 15:35:33.799705 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-h6c9k"] Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.101480 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-h6c9k" event={"ID":"b5e28688-3dda-482d-87b3-76899d96e575","Type":"ContainerStarted","Data":"80b3cb974fa8ef8defe1fa43e4fc21bd83860e87014685fb54ea0bba81c97756"} Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.117345 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-h6c9k" podStartSLOduration=2.743991557 podStartE2EDuration="5.117328462s" podCreationTimestamp="2025-12-11 15:35:29 +0000 UTC" firstStartedPulling="2025-12-11 15:35:30.768686046 +0000 UTC m=+741.542919481" lastFinishedPulling="2025-12-11 15:35:33.142022951 +0000 UTC m=+743.916256386" observedRunningTime="2025-12-11 15:35:34.113117909 +0000 UTC m=+744.887351344" watchObservedRunningTime="2025-12-11 15:35:34.117328462 +0000 UTC m=+744.891561897" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.606810 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-sq82k"] Dec 11 15:35:34 crc kubenswrapper[4723]: E1211 15:35:34.607065 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerName="util" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.607080 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerName="util" Dec 11 15:35:34 crc kubenswrapper[4723]: E1211 15:35:34.607103 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerName="pull" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.607110 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerName="pull" Dec 11 15:35:34 crc kubenswrapper[4723]: E1211 15:35:34.607129 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerName="extract" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.607137 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerName="extract" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.607247 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="c62e47fd-6638-4d88-a884-a0b2bef6b59f" containerName="extract" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.607639 4723 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.614048 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-sq82k"] Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.636937 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p58ms\" (UniqueName: \"kubernetes.io/projected/d2f9c747-2d49-42f4-9f1f-2d1a005add73-kube-api-access-p58ms\") pod \"infrawatch-operators-sq82k\" (UID: \"d2f9c747-2d49-42f4-9f1f-2d1a005add73\") " pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.738171 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p58ms\" (UniqueName: \"kubernetes.io/projected/d2f9c747-2d49-42f4-9f1f-2d1a005add73-kube-api-access-p58ms\") pod \"infrawatch-operators-sq82k\" (UID: \"d2f9c747-2d49-42f4-9f1f-2d1a005add73\") " pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.758575 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p58ms\" (UniqueName: \"kubernetes.io/projected/d2f9c747-2d49-42f4-9f1f-2d1a005add73-kube-api-access-p58ms\") pod \"infrawatch-operators-sq82k\" (UID: \"d2f9c747-2d49-42f4-9f1f-2d1a005add73\") " pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:34 crc kubenswrapper[4723]: I1211 15:35:34.920804 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:35 crc kubenswrapper[4723]: I1211 15:35:35.061455 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-7j5zh" Dec 11 15:35:35 crc kubenswrapper[4723]: I1211 15:35:35.108504 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-h6c9k" podUID="b5e28688-3dda-482d-87b3-76899d96e575" containerName="registry-server" containerID="cri-o://80b3cb974fa8ef8defe1fa43e4fc21bd83860e87014685fb54ea0bba81c97756" gracePeriod=2 Dec 11 15:35:35 crc kubenswrapper[4723]: I1211 15:35:35.115428 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-sq82k"] Dec 11 15:35:35 crc kubenswrapper[4723]: W1211 15:35:35.124742 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2f9c747_2d49_42f4_9f1f_2d1a005add73.slice/crio-9678ec16a9161859933ac80b393cc73f22e77fb5c2fadd7d649869a0dcb449ad WatchSource:0}: Error finding container 9678ec16a9161859933ac80b393cc73f22e77fb5c2fadd7d649869a0dcb449ad: Status 404 returned error can't find the container with id 9678ec16a9161859933ac80b393cc73f22e77fb5c2fadd7d649869a0dcb449ad Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.131474 4723 generic.go:334] "Generic (PLEG): container finished" podID="b5e28688-3dda-482d-87b3-76899d96e575" containerID="80b3cb974fa8ef8defe1fa43e4fc21bd83860e87014685fb54ea0bba81c97756" exitCode=0 Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.131562 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-h6c9k" event={"ID":"b5e28688-3dda-482d-87b3-76899d96e575","Type":"ContainerDied","Data":"80b3cb974fa8ef8defe1fa43e4fc21bd83860e87014685fb54ea0bba81c97756"} Dec 11 15:35:36 crc 
kubenswrapper[4723]: I1211 15:35:36.133951 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-sq82k" event={"ID":"d2f9c747-2d49-42f4-9f1f-2d1a005add73","Type":"ContainerStarted","Data":"4085a568a755dac127153dd94a02e42d96c571dc1ae4c1f20c5454788d3f77a5"} Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.134041 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-sq82k" event={"ID":"d2f9c747-2d49-42f4-9f1f-2d1a005add73","Type":"ContainerStarted","Data":"9678ec16a9161859933ac80b393cc73f22e77fb5c2fadd7d649869a0dcb449ad"} Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.154563 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-sq82k" podStartSLOduration=1.6423064969999999 podStartE2EDuration="2.154544291s" podCreationTimestamp="2025-12-11 15:35:34 +0000 UTC" firstStartedPulling="2025-12-11 15:35:35.127397243 +0000 UTC m=+745.901630678" lastFinishedPulling="2025-12-11 15:35:35.639635037 +0000 UTC m=+746.413868472" observedRunningTime="2025-12-11 15:35:36.148485469 +0000 UTC m=+746.922718894" watchObservedRunningTime="2025-12-11 15:35:36.154544291 +0000 UTC m=+746.928777726" Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.213267 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-h6c9k" Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.326623 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9gg8\" (UniqueName: \"kubernetes.io/projected/b5e28688-3dda-482d-87b3-76899d96e575-kube-api-access-f9gg8\") pod \"b5e28688-3dda-482d-87b3-76899d96e575\" (UID: \"b5e28688-3dda-482d-87b3-76899d96e575\") " Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.338694 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5e28688-3dda-482d-87b3-76899d96e575-kube-api-access-f9gg8" (OuterVolumeSpecName: "kube-api-access-f9gg8") pod "b5e28688-3dda-482d-87b3-76899d96e575" (UID: "b5e28688-3dda-482d-87b3-76899d96e575"). InnerVolumeSpecName "kube-api-access-f9gg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:35:36 crc kubenswrapper[4723]: I1211 15:35:36.428274 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9gg8\" (UniqueName: \"kubernetes.io/projected/b5e28688-3dda-482d-87b3-76899d96e575-kube-api-access-f9gg8\") on node \"crc\" DevicePath \"\"" Dec 11 15:35:37 crc kubenswrapper[4723]: I1211 15:35:37.142002 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-h6c9k" Dec 11 15:35:37 crc kubenswrapper[4723]: I1211 15:35:37.142045 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-h6c9k" event={"ID":"b5e28688-3dda-482d-87b3-76899d96e575","Type":"ContainerDied","Data":"3e43b1882bfdaca597d5f143889be9d5f7a6a7d1710c6f3b6c4c61ef01c662b3"} Dec 11 15:35:37 crc kubenswrapper[4723]: I1211 15:35:37.142082 4723 scope.go:117] "RemoveContainer" containerID="80b3cb974fa8ef8defe1fa43e4fc21bd83860e87014685fb54ea0bba81c97756" Dec 11 15:35:37 crc kubenswrapper[4723]: I1211 15:35:37.185048 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-h6c9k"] Dec 11 15:35:37 crc kubenswrapper[4723]: I1211 15:35:37.185500 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-h6c9k"] Dec 11 15:35:37 crc kubenswrapper[4723]: I1211 15:35:37.555466 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5e28688-3dda-482d-87b3-76899d96e575" path="/var/lib/kubelet/pods/b5e28688-3dda-482d-87b3-76899d96e575/volumes" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.799389 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7"] Dec 11 15:35:40 crc kubenswrapper[4723]: E1211 15:35:40.799985 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5e28688-3dda-482d-87b3-76899d96e575" containerName="registry-server" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.800000 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5e28688-3dda-482d-87b3-76899d96e575" containerName="registry-server" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.800126 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5e28688-3dda-482d-87b3-76899d96e575" containerName="registry-server" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.800625 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.802619 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.805219 4723 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-8vxnh" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.805411 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.826486 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7"] Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.905821 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ftsb\" (UniqueName: \"kubernetes.io/projected/276a8a4c-ca04-4ba4-a61f-40834e36a667-kube-api-access-8ftsb\") pod \"cert-manager-operator-controller-manager-5446d6888b-bznv7\" (UID: \"276a8a4c-ca04-4ba4-a61f-40834e36a667\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:40 crc kubenswrapper[4723]: I1211 15:35:40.905955 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/276a8a4c-ca04-4ba4-a61f-40834e36a667-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-bznv7\" (UID: \"276a8a4c-ca04-4ba4-a61f-40834e36a667\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:41 crc kubenswrapper[4723]: I1211 15:35:41.007304 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/276a8a4c-ca04-4ba4-a61f-40834e36a667-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-bznv7\" (UID: \"276a8a4c-ca04-4ba4-a61f-40834e36a667\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:41 crc kubenswrapper[4723]: I1211 15:35:41.007404 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ftsb\" (UniqueName: \"kubernetes.io/projected/276a8a4c-ca04-4ba4-a61f-40834e36a667-kube-api-access-8ftsb\") pod \"cert-manager-operator-controller-manager-5446d6888b-bznv7\" (UID: \"276a8a4c-ca04-4ba4-a61f-40834e36a667\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:41 crc kubenswrapper[4723]: I1211 15:35:41.007940 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/276a8a4c-ca04-4ba4-a61f-40834e36a667-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-bznv7\" (UID: \"276a8a4c-ca04-4ba4-a61f-40834e36a667\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:41 crc kubenswrapper[4723]: I1211 15:35:41.028301 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ftsb\" (UniqueName: \"kubernetes.io/projected/276a8a4c-ca04-4ba4-a61f-40834e36a667-kube-api-access-8ftsb\") pod \"cert-manager-operator-controller-manager-5446d6888b-bznv7\" (UID: \"276a8a4c-ca04-4ba4-a61f-40834e36a667\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:41 crc kubenswrapper[4723]: I1211 15:35:41.116379 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" Dec 11 15:35:41 crc kubenswrapper[4723]: I1211 15:35:41.619077 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7"] Dec 11 15:35:42 crc kubenswrapper[4723]: I1211 15:35:42.181848 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" event={"ID":"276a8a4c-ca04-4ba4-a61f-40834e36a667","Type":"ContainerStarted","Data":"2dba3c397a901eb826603db31e50bfdc6c1dd5f8a7b8bf90cc78b16ffc3fc818"} Dec 11 15:35:43 crc kubenswrapper[4723]: I1211 15:35:43.745350 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:35:43 crc kubenswrapper[4723]: I1211 15:35:43.745789 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:35:44 crc kubenswrapper[4723]: I1211 15:35:44.922147 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:44 crc kubenswrapper[4723]: I1211 15:35:44.922220 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:44 crc kubenswrapper[4723]: I1211 15:35:44.997749 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:45 crc kubenswrapper[4723]: I1211 15:35:45.267620 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-sq82k" Dec 11 15:35:46 crc kubenswrapper[4723]: I1211 15:35:46.214708 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"e6c2c683-4358-47d5-b5cd-e97c588b965e","Type":"ContainerStarted","Data":"37acfae1383d574133db7d2b035203d239997a302cf891cbbc6024ea06dd1311"} Dec 11 15:35:46 crc kubenswrapper[4723]: I1211 15:35:46.216381 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" event={"ID":"276a8a4c-ca04-4ba4-a61f-40834e36a667","Type":"ContainerStarted","Data":"fd1c6da3bb1536040ad7454f46faf9d921c6dd1871e3017ed7bf1caa49953ee2"} Dec 11 15:35:46 crc kubenswrapper[4723]: I1211 15:35:46.295646 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-bznv7" podStartSLOduration=2.18959333 podStartE2EDuration="6.295629743s" podCreationTimestamp="2025-12-11 15:35:40 +0000 UTC" firstStartedPulling="2025-12-11 15:35:41.625450089 +0000 UTC m=+752.399683524" lastFinishedPulling="2025-12-11 15:35:45.731486502 +0000 UTC m=+756.505719937" 
observedRunningTime="2025-12-11 15:35:46.291369529 +0000 UTC m=+757.065602964" watchObservedRunningTime="2025-12-11 15:35:46.295629743 +0000 UTC m=+757.069863178" Dec 11 15:35:47 crc kubenswrapper[4723]: I1211 15:35:47.223615 4723 generic.go:334] "Generic (PLEG): container finished" podID="e6c2c683-4358-47d5-b5cd-e97c588b965e" containerID="37acfae1383d574133db7d2b035203d239997a302cf891cbbc6024ea06dd1311" exitCode=0 Dec 11 15:35:47 crc kubenswrapper[4723]: I1211 15:35:47.223678 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"e6c2c683-4358-47d5-b5cd-e97c588b965e","Type":"ContainerDied","Data":"37acfae1383d574133db7d2b035203d239997a302cf891cbbc6024ea06dd1311"} Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.230726 4723 generic.go:334] "Generic (PLEG): container finished" podID="e6c2c683-4358-47d5-b5cd-e97c588b965e" containerID="d03bd611343933ac20b208d2b54825f9ee040f40f63200e1cc44ae1c214cddb0" exitCode=0 Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.230811 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"e6c2c683-4358-47d5-b5cd-e97c588b965e","Type":"ContainerDied","Data":"d03bd611343933ac20b208d2b54825f9ee040f40f63200e1cc44ae1c214cddb0"} Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.638823 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-8jg8v"] Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.639666 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.658848 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.659289 4723 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-gltcf" Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.659554 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.678984 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-8jg8v"] Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.755347 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/157016c0-6dd9-40c5-a132-0efa815cc2a2-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-8jg8v\" (UID: \"157016c0-6dd9-40c5-a132-0efa815cc2a2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.755606 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrls9\" (UniqueName: \"kubernetes.io/projected/157016c0-6dd9-40c5-a132-0efa815cc2a2-kube-api-access-qrls9\") pod \"cert-manager-webhook-f4fb5df64-8jg8v\" (UID: \"157016c0-6dd9-40c5-a132-0efa815cc2a2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.856938 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/157016c0-6dd9-40c5-a132-0efa815cc2a2-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-8jg8v\" (UID: 
\"157016c0-6dd9-40c5-a132-0efa815cc2a2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:48 crc kubenswrapper[4723]: I1211 15:35:48.857362 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrls9\" (UniqueName: \"kubernetes.io/projected/157016c0-6dd9-40c5-a132-0efa815cc2a2-kube-api-access-qrls9\") pod \"cert-manager-webhook-f4fb5df64-8jg8v\" (UID: \"157016c0-6dd9-40c5-a132-0efa815cc2a2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:49 crc kubenswrapper[4723]: I1211 15:35:49.244794 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/157016c0-6dd9-40c5-a132-0efa815cc2a2-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-8jg8v\" (UID: \"157016c0-6dd9-40c5-a132-0efa815cc2a2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:49 crc kubenswrapper[4723]: I1211 15:35:49.290162 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrls9\" (UniqueName: \"kubernetes.io/projected/157016c0-6dd9-40c5-a132-0efa815cc2a2-kube-api-access-qrls9\") pod \"cert-manager-webhook-f4fb5df64-8jg8v\" (UID: \"157016c0-6dd9-40c5-a132-0efa815cc2a2\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:49 crc kubenswrapper[4723]: I1211 15:35:49.558853 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:35:49 crc kubenswrapper[4723]: I1211 15:35:49.961023 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2"] Dec 11 15:35:49 crc kubenswrapper[4723]: I1211 15:35:49.962159 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:49 crc kubenswrapper[4723]: I1211 15:35:49.989674 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2"] Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.140728 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-util\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.140807 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-bundle\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.140842 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85xdh\" (UniqueName: \"kubernetes.io/projected/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-kube-api-access-85xdh\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.270661 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-bundle\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.271049 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85xdh\" (UniqueName: \"kubernetes.io/projected/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-kube-api-access-85xdh\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.271136 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-util\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.271924 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-util\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " 
pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.272184 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-bundle\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.297483 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85xdh\" (UniqueName: \"kubernetes.io/projected/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-kube-api-access-85xdh\") pod \"e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.584068 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.652650 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-8jg8v"] Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.663345 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49"] Dec 11 15:35:50 crc kubenswrapper[4723]: W1211 15:35:50.664512 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod157016c0_6dd9_40c5_a132_0efa815cc2a2.slice/crio-6bb68684aada144c1cc2c39645d66444d5766f21013896e0706bb93226d39a98 WatchSource:0}: Error finding container 6bb68684aada144c1cc2c39645d66444d5766f21013896e0706bb93226d39a98: Status 404 returned error can't find the container with id 6bb68684aada144c1cc2c39645d66444d5766f21013896e0706bb93226d39a98 Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.664817 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.675740 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-bundle\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.675816 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-util\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.675865 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltpds\" (UniqueName: \"kubernetes.io/projected/796ed837-5075-4416-bac4-7b920f3857e3-kube-api-access-ltpds\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.681540 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49"] Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.778296 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-bundle\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.778728 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-util\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.778791 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltpds\" (UniqueName: \"kubernetes.io/projected/796ed837-5075-4416-bac4-7b920f3857e3-kube-api-access-ltpds\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.779199 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-bundle\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " 
pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.779239 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-util\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:50 crc kubenswrapper[4723]: I1211 15:35:50.809584 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltpds\" (UniqueName: \"kubernetes.io/projected/796ed837-5075-4416-bac4-7b920f3857e3-kube-api-access-ltpds\") pod \"2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.017220 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.282432 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs"] Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.287363 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.304643 4723 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-rxgbf" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.312844 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" event={"ID":"157016c0-6dd9-40c5-a132-0efa815cc2a2","Type":"ContainerStarted","Data":"6bb68684aada144c1cc2c39645d66444d5766f21013896e0706bb93226d39a98"} Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.322647 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs"] Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.352280 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2"] Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.487286 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5vl5\" (UniqueName: \"kubernetes.io/projected/8648dc98-8bab-4d61-8669-36126c074dae-kube-api-access-x5vl5\") pod \"cert-manager-cainjector-855d9ccff4-tc9gs\" (UID: \"8648dc98-8bab-4d61-8669-36126c074dae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.487952 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8648dc98-8bab-4d61-8669-36126c074dae-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-tc9gs\" (UID: \"8648dc98-8bab-4d61-8669-36126c074dae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.515861 4723 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt"] Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.517708 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.526608 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.547203 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt"] Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.588943 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5vl5\" (UniqueName: \"kubernetes.io/projected/8648dc98-8bab-4d61-8669-36126c074dae-kube-api-access-x5vl5\") pod \"cert-manager-cainjector-855d9ccff4-tc9gs\" (UID: \"8648dc98-8bab-4d61-8669-36126c074dae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.589033 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8648dc98-8bab-4d61-8669-36126c074dae-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-tc9gs\" (UID: \"8648dc98-8bab-4d61-8669-36126c074dae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.613510 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5vl5\" (UniqueName: \"kubernetes.io/projected/8648dc98-8bab-4d61-8669-36126c074dae-kube-api-access-x5vl5\") pod \"cert-manager-cainjector-855d9ccff4-tc9gs\" (UID: \"8648dc98-8bab-4d61-8669-36126c074dae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.625649 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8648dc98-8bab-4d61-8669-36126c074dae-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-tc9gs\" (UID: \"8648dc98-8bab-4d61-8669-36126c074dae\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.647313 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.653022 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49"] Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.690328 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ptkr\" (UniqueName: \"kubernetes.io/projected/6eb9c879-45c4-4082-b6ff-fe25f5a82211-kube-api-access-7ptkr\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.690403 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.690453 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.791984 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.792073 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.792139 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ptkr\" (UniqueName: \"kubernetes.io/projected/6eb9c879-45c4-4082-b6ff-fe25f5a82211-kube-api-access-7ptkr\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.793072 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " 
pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.793345 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.811070 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ptkr\" (UniqueName: \"kubernetes.io/projected/6eb9c879-45c4-4082-b6ff-fe25f5a82211-kube-api-access-7ptkr\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.842694 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:35:51 crc kubenswrapper[4723]: I1211 15:35:51.909153 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs"] Dec 11 15:35:51 crc kubenswrapper[4723]: W1211 15:35:51.915433 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8648dc98_8bab_4d61_8669_36126c074dae.slice/crio-5cc1c73e749522315025640adcdbd04169a9e3dc0deeb7a2ecf7cfea61fd5647 WatchSource:0}: Error finding container 5cc1c73e749522315025640adcdbd04169a9e3dc0deeb7a2ecf7cfea61fd5647: Status 404 returned error can't find the container with id 5cc1c73e749522315025640adcdbd04169a9e3dc0deeb7a2ecf7cfea61fd5647 Dec 11 15:35:52 crc kubenswrapper[4723]: I1211 15:35:52.073493 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt"] Dec 11 15:35:52 crc kubenswrapper[4723]: W1211 15:35:52.075522 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6eb9c879_45c4_4082_b6ff_fe25f5a82211.slice/crio-8b13ff833f33f159a7578668aa36140ec47be19ea21152c2b8716e1e16173c22 WatchSource:0}: Error finding container 8b13ff833f33f159a7578668aa36140ec47be19ea21152c2b8716e1e16173c22: Status 404 returned error can't find the container with id 8b13ff833f33f159a7578668aa36140ec47be19ea21152c2b8716e1e16173c22 Dec 11 15:35:52 crc kubenswrapper[4723]: I1211 15:35:52.320471 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" event={"ID":"7aaa86ba-66d4-44c0-9fbe-e0ba61775643","Type":"ContainerStarted","Data":"b2a4734a16d1d491adb618b14bbeef29bcb8f6eae1a7916dfc19d8681fe61cf3"} Dec 11 15:35:52 crc kubenswrapper[4723]: I1211 15:35:52.322752 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" event={"ID":"796ed837-5075-4416-bac4-7b920f3857e3","Type":"ContainerStarted","Data":"5a082b0c81b1ae524ae54a51fb3813d2a223b705cd859cda67bcfb547f6c9bb9"} Dec 11 15:35:52 crc kubenswrapper[4723]: I1211 15:35:52.324175 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" event={"ID":"6eb9c879-45c4-4082-b6ff-fe25f5a82211","Type":"ContainerStarted","Data":"8b13ff833f33f159a7578668aa36140ec47be19ea21152c2b8716e1e16173c22"} Dec 11 15:35:52 crc kubenswrapper[4723]: I1211 15:35:52.325160 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" event={"ID":"8648dc98-8bab-4d61-8669-36126c074dae","Type":"ContainerStarted","Data":"5cc1c73e749522315025640adcdbd04169a9e3dc0deeb7a2ecf7cfea61fd5647"} Dec 11 15:35:55 crc kubenswrapper[4723]: I1211 15:35:55.343034 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"e6c2c683-4358-47d5-b5cd-e97c588b965e","Type":"ContainerStarted","Data":"c6b102a184374d1c5178a1ea6efa9ba92487f57db3ec5fdb50eb3ddd2a4a697b"} Dec 11 15:35:55 crc kubenswrapper[4723]: I1211 15:35:55.345337 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:35:55 crc kubenswrapper[4723]: I1211 15:35:55.346646 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" event={"ID":"7aaa86ba-66d4-44c0-9fbe-e0ba61775643","Type":"ContainerStarted","Data":"10a1d3f90f928356be6f5827a461742ec538e7e7a277a50bbbf040156aadc200"} Dec 11 15:35:55 crc kubenswrapper[4723]: I1211 15:35:55.348006 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" event={"ID":"796ed837-5075-4416-bac4-7b920f3857e3","Type":"ContainerStarted","Data":"54dbadaac268643efc65491a0e954f564d9fcd942a22da9f29b613c250abc40a"} Dec 11 15:35:55 crc kubenswrapper[4723]: I1211 15:35:55.349708 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" event={"ID":"6eb9c879-45c4-4082-b6ff-fe25f5a82211","Type":"ContainerStarted","Data":"8408e5fc16243225b2f1d978148c5e5bfa50c4344a88b3ec133ca55731b19ef2"} Dec 11 15:35:55 crc kubenswrapper[4723]: I1211 15:35:55.587654 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=11.317789258 podStartE2EDuration="52.587633667s" podCreationTimestamp="2025-12-11 15:35:03 +0000 UTC" firstStartedPulling="2025-12-11 15:35:04.448357728 +0000 UTC m=+715.222591153" lastFinishedPulling="2025-12-11 15:35:45.718202127 +0000 UTC m=+756.492435562" observedRunningTime="2025-12-11 15:35:55.582650973 +0000 UTC m=+766.356884408" watchObservedRunningTime="2025-12-11 15:35:55.587633667 +0000 UTC m=+766.361867112" Dec 11 15:35:56 crc kubenswrapper[4723]: I1211 15:35:56.374526 4723 generic.go:334] "Generic (PLEG): container finished" podID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerID="10a1d3f90f928356be6f5827a461742ec538e7e7a277a50bbbf040156aadc200" exitCode=0 Dec 11 15:35:56 crc kubenswrapper[4723]: I1211 15:35:56.374697 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" event={"ID":"7aaa86ba-66d4-44c0-9fbe-e0ba61775643","Type":"ContainerDied","Data":"10a1d3f90f928356be6f5827a461742ec538e7e7a277a50bbbf040156aadc200"} Dec 11 15:35:56 crc kubenswrapper[4723]: I1211 15:35:56.380439 4723 generic.go:334] "Generic (PLEG): container finished" 
podID="796ed837-5075-4416-bac4-7b920f3857e3" containerID="54dbadaac268643efc65491a0e954f564d9fcd942a22da9f29b613c250abc40a" exitCode=0 Dec 11 15:35:56 crc kubenswrapper[4723]: I1211 15:35:56.380552 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" event={"ID":"796ed837-5075-4416-bac4-7b920f3857e3","Type":"ContainerDied","Data":"54dbadaac268643efc65491a0e954f564d9fcd942a22da9f29b613c250abc40a"} Dec 11 15:35:56 crc kubenswrapper[4723]: I1211 15:35:56.387147 4723 generic.go:334] "Generic (PLEG): container finished" podID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerID="8408e5fc16243225b2f1d978148c5e5bfa50c4344a88b3ec133ca55731b19ef2" exitCode=0 Dec 11 15:35:56 crc kubenswrapper[4723]: I1211 15:35:56.387243 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" event={"ID":"6eb9c879-45c4-4082-b6ff-fe25f5a82211","Type":"ContainerDied","Data":"8408e5fc16243225b2f1d978148c5e5bfa50c4344a88b3ec133ca55731b19ef2"} Dec 11 15:35:59 crc kubenswrapper[4723]: I1211 15:35:59.651121 4723 generic.go:334] "Generic (PLEG): container finished" podID="796ed837-5075-4416-bac4-7b920f3857e3" containerID="adb785d2523c4414bea0d06566986f50fb72179514b1c53987afb4ff3d016800" exitCode=0 Dec 11 15:35:59 crc kubenswrapper[4723]: I1211 15:35:59.651303 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" event={"ID":"796ed837-5075-4416-bac4-7b920f3857e3","Type":"ContainerDied","Data":"adb785d2523c4414bea0d06566986f50fb72179514b1c53987afb4ff3d016800"} Dec 11 15:36:00 crc kubenswrapper[4723]: I1211 15:36:00.683681 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" event={"ID":"6eb9c879-45c4-4082-b6ff-fe25f5a82211","Type":"ContainerStarted","Data":"e48b6b7c61ef4fc2aff93e6ce96215598851227139c3323f85e85d8f3b5a6a4c"} Dec 11 15:36:01 crc kubenswrapper[4723]: I1211 15:36:01.697920 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" event={"ID":"7aaa86ba-66d4-44c0-9fbe-e0ba61775643","Type":"ContainerStarted","Data":"9eb5e9d962a78e5e1d1dfa27980449e2e3ee17464b1e07b02253055fe5ebba23"} Dec 11 15:36:01 crc kubenswrapper[4723]: I1211 15:36:01.703740 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" event={"ID":"796ed837-5075-4416-bac4-7b920f3857e3","Type":"ContainerStarted","Data":"6000d219c199ff9e75844d38988f037cc84980ee0541b1ebfd3f48b0f77523d4"} Dec 11 15:36:02 crc kubenswrapper[4723]: I1211 15:36:02.728157 4723 generic.go:334] "Generic (PLEG): container finished" podID="796ed837-5075-4416-bac4-7b920f3857e3" containerID="6000d219c199ff9e75844d38988f037cc84980ee0541b1ebfd3f48b0f77523d4" exitCode=0 Dec 11 15:36:02 crc kubenswrapper[4723]: I1211 15:36:02.728848 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" event={"ID":"796ed837-5075-4416-bac4-7b920f3857e3","Type":"ContainerDied","Data":"6000d219c199ff9e75844d38988f037cc84980ee0541b1ebfd3f48b0f77523d4"} Dec 11 15:36:02 crc kubenswrapper[4723]: I1211 15:36:02.732655 4723 generic.go:334] "Generic (PLEG): container finished" 
podID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerID="e48b6b7c61ef4fc2aff93e6ce96215598851227139c3323f85e85d8f3b5a6a4c" exitCode=0 Dec 11 15:36:02 crc kubenswrapper[4723]: I1211 15:36:02.732718 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" event={"ID":"6eb9c879-45c4-4082-b6ff-fe25f5a82211","Type":"ContainerDied","Data":"e48b6b7c61ef4fc2aff93e6ce96215598851227139c3323f85e85d8f3b5a6a4c"} Dec 11 15:36:02 crc kubenswrapper[4723]: I1211 15:36:02.748909 4723 generic.go:334] "Generic (PLEG): container finished" podID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerID="9eb5e9d962a78e5e1d1dfa27980449e2e3ee17464b1e07b02253055fe5ebba23" exitCode=0 Dec 11 15:36:02 crc kubenswrapper[4723]: I1211 15:36:02.748961 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" event={"ID":"7aaa86ba-66d4-44c0-9fbe-e0ba61775643","Type":"ContainerDied","Data":"9eb5e9d962a78e5e1d1dfa27980449e2e3ee17464b1e07b02253055fe5ebba23"} Dec 11 15:36:04 crc kubenswrapper[4723]: I1211 15:36:04.398580 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="e6c2c683-4358-47d5-b5cd-e97c588b965e" containerName="elasticsearch" probeResult="failure" output=< Dec 11 15:36:04 crc kubenswrapper[4723]: {"timestamp": "2025-12-11T15:36:04+00:00", "message": "readiness probe failed", "curl_rc": "7"} Dec 11 15:36:04 crc kubenswrapper[4723]: > Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.114865 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-s9w7v"] Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.115923 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.126004 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-s9w7v"] Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.126121 4723 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-qh28n" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.263711 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae-bound-sa-token\") pod \"cert-manager-86cb77c54b-s9w7v\" (UID: \"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae\") " pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.263897 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfmtv\" (UniqueName: \"kubernetes.io/projected/b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae-kube-api-access-xfmtv\") pod \"cert-manager-86cb77c54b-s9w7v\" (UID: \"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae\") " pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.367389 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae-bound-sa-token\") pod \"cert-manager-86cb77c54b-s9w7v\" (UID: \"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae\") " pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.367697 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfmtv\" (UniqueName: \"kubernetes.io/projected/b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae-kube-api-access-xfmtv\") pod \"cert-manager-86cb77c54b-s9w7v\" (UID: \"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae\") " pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.381213 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="e6c2c683-4358-47d5-b5cd-e97c588b965e" containerName="elasticsearch" probeResult="failure" output=< Dec 11 15:36:09 crc kubenswrapper[4723]: {"timestamp": "2025-12-11T15:36:09+00:00", "message": "readiness probe failed", "curl_rc": "7"} Dec 11 15:36:09 crc kubenswrapper[4723]: > Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.387819 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae-bound-sa-token\") pod \"cert-manager-86cb77c54b-s9w7v\" (UID: \"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae\") " pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.388052 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfmtv\" (UniqueName: \"kubernetes.io/projected/b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae-kube-api-access-xfmtv\") pod \"cert-manager-86cb77c54b-s9w7v\" (UID: \"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae\") " pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:09 crc kubenswrapper[4723]: I1211 15:36:09.448543 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-s9w7v" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.317272 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.380696 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltpds\" (UniqueName: \"kubernetes.io/projected/796ed837-5075-4416-bac4-7b920f3857e3-kube-api-access-ltpds\") pod \"796ed837-5075-4416-bac4-7b920f3857e3\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.380816 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-bundle\") pod \"796ed837-5075-4416-bac4-7b920f3857e3\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.380894 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-util\") pod \"796ed837-5075-4416-bac4-7b920f3857e3\" (UID: \"796ed837-5075-4416-bac4-7b920f3857e3\") " Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.384171 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-bundle" (OuterVolumeSpecName: "bundle") pod "796ed837-5075-4416-bac4-7b920f3857e3" (UID: "796ed837-5075-4416-bac4-7b920f3857e3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.389141 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/796ed837-5075-4416-bac4-7b920f3857e3-kube-api-access-ltpds" (OuterVolumeSpecName: "kube-api-access-ltpds") pod "796ed837-5075-4416-bac4-7b920f3857e3" (UID: "796ed837-5075-4416-bac4-7b920f3857e3"). InnerVolumeSpecName "kube-api-access-ltpds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.391495 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-util" (OuterVolumeSpecName: "util") pod "796ed837-5075-4416-bac4-7b920f3857e3" (UID: "796ed837-5075-4416-bac4-7b920f3857e3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.482382 4723 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-util\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.482906 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltpds\" (UniqueName: \"kubernetes.io/projected/796ed837-5075-4416-bac4-7b920f3857e3-kube-api-access-ltpds\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.482999 4723 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/796ed837-5075-4416-bac4-7b920f3857e3-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.803783 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" event={"ID":"796ed837-5075-4416-bac4-7b920f3857e3","Type":"ContainerDied","Data":"5a082b0c81b1ae524ae54a51fb3813d2a223b705cd859cda67bcfb547f6c9bb9"} Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.804117 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a082b0c81b1ae524ae54a51fb3813d2a223b705cd859cda67bcfb547f6c9bb9" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.803828 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/2f08b65b065345a6979e3a1c6f35dc26149355da31ce5679c13b3599147sx49" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.805254 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" event={"ID":"157016c0-6dd9-40c5-a132-0efa815cc2a2","Type":"ContainerStarted","Data":"f6845dbdcdf1a4a6471a17bac764ab3b54a81b8d68f8501442612b6e6fe1c2aa"} Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.805415 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.807563 4723 generic.go:334] "Generic (PLEG): container finished" podID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerID="048088ac3e6638c943ce2dd0747366375d7a53de1a175e19e447f792ab39daf1" exitCode=0 Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.807624 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" event={"ID":"6eb9c879-45c4-4082-b6ff-fe25f5a82211","Type":"ContainerDied","Data":"048088ac3e6638c943ce2dd0747366375d7a53de1a175e19e447f792ab39daf1"} Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.808765 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" event={"ID":"8648dc98-8bab-4d61-8669-36126c074dae","Type":"ContainerStarted","Data":"4fb6ba5c82069b8e7514eed0b204d3f6aa2517f87158cd21ca062fb7ac95ad1f"} Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.810423 4723 generic.go:334] "Generic (PLEG): container finished" podID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerID="a3775e270c698efb2b7a5969732e352843ec46c708755b3c80bacf789fcdab80" exitCode=0 Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.810451 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" event={"ID":"7aaa86ba-66d4-44c0-9fbe-e0ba61775643","Type":"ContainerDied","Data":"a3775e270c698efb2b7a5969732e352843ec46c708755b3c80bacf789fcdab80"} Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.828198 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" podStartSLOduration=3.04825553 podStartE2EDuration="22.828177361s" podCreationTimestamp="2025-12-11 15:35:48 +0000 UTC" firstStartedPulling="2025-12-11 15:35:50.669125685 +0000 UTC m=+761.443359120" lastFinishedPulling="2025-12-11 15:36:10.449047516 +0000 UTC m=+781.223280951" observedRunningTime="2025-12-11 15:36:10.824081701 +0000 UTC m=+781.598315126" watchObservedRunningTime="2025-12-11 15:36:10.828177361 +0000 UTC m=+781.602410796" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.863916 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-tc9gs" podStartSLOduration=1.332805368 podStartE2EDuration="19.863896565s" podCreationTimestamp="2025-12-11 15:35:51 +0000 UTC" firstStartedPulling="2025-12-11 15:35:51.918240826 +0000 UTC m=+762.692474261" lastFinishedPulling="2025-12-11 15:36:10.449332023 +0000 UTC m=+781.223565458" observedRunningTime="2025-12-11 15:36:10.860644839 +0000 UTC m=+781.634878284" watchObservedRunningTime="2025-12-11 15:36:10.863896565 +0000 UTC m=+781.638130000" Dec 11 15:36:10 crc kubenswrapper[4723]: I1211 15:36:10.924827 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-s9w7v"] Dec 11 15:36:11 crc kubenswrapper[4723]: I1211 15:36:11.819012 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-s9w7v" event={"ID":"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae","Type":"ContainerStarted","Data":"51c57e54487e1ed442cae1b1cf862eb7c31f31808edb75ff7f79b8177c3bab32"} Dec 11 15:36:11 crc kubenswrapper[4723]: I1211 15:36:11.819372 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-s9w7v" event={"ID":"b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae","Type":"ContainerStarted","Data":"ce8b87dea97e2078780abca8d7f235cb889aa62b1d3e1e694151fa47fec545dc"} Dec 11 15:36:11 crc kubenswrapper[4723]: I1211 15:36:11.837037 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-s9w7v" podStartSLOduration=2.837021659 podStartE2EDuration="2.837021659s" podCreationTimestamp="2025-12-11 15:36:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:36:11.834921633 +0000 UTC m=+782.609155068" watchObservedRunningTime="2025-12-11 15:36:11.837021659 +0000 UTC m=+782.611255084" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.190234 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.196217 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.312142 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-util\") pod \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.312549 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-bundle\") pod \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.312572 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85xdh\" (UniqueName: \"kubernetes.io/projected/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-kube-api-access-85xdh\") pod \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.312604 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-util\") pod \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.312690 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-bundle\") pod \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\" (UID: \"7aaa86ba-66d4-44c0-9fbe-e0ba61775643\") " Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.312717 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ptkr\" (UniqueName: \"kubernetes.io/projected/6eb9c879-45c4-4082-b6ff-fe25f5a82211-kube-api-access-7ptkr\") pod \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\" (UID: \"6eb9c879-45c4-4082-b6ff-fe25f5a82211\") " Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.312993 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-bundle" (OuterVolumeSpecName: "bundle") pod "6eb9c879-45c4-4082-b6ff-fe25f5a82211" (UID: "6eb9c879-45c4-4082-b6ff-fe25f5a82211"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.313243 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-bundle" (OuterVolumeSpecName: "bundle") pod "7aaa86ba-66d4-44c0-9fbe-e0ba61775643" (UID: "7aaa86ba-66d4-44c0-9fbe-e0ba61775643"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.313757 4723 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.313774 4723 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.323997 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-util" (OuterVolumeSpecName: "util") pod "6eb9c879-45c4-4082-b6ff-fe25f5a82211" (UID: "6eb9c879-45c4-4082-b6ff-fe25f5a82211"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.329347 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eb9c879-45c4-4082-b6ff-fe25f5a82211-kube-api-access-7ptkr" (OuterVolumeSpecName: "kube-api-access-7ptkr") pod "6eb9c879-45c4-4082-b6ff-fe25f5a82211" (UID: "6eb9c879-45c4-4082-b6ff-fe25f5a82211"). InnerVolumeSpecName "kube-api-access-7ptkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.329411 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-kube-api-access-85xdh" (OuterVolumeSpecName: "kube-api-access-85xdh") pod "7aaa86ba-66d4-44c0-9fbe-e0ba61775643" (UID: "7aaa86ba-66d4-44c0-9fbe-e0ba61775643"). InnerVolumeSpecName "kube-api-access-85xdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.333372 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-util" (OuterVolumeSpecName: "util") pod "7aaa86ba-66d4-44c0-9fbe-e0ba61775643" (UID: "7aaa86ba-66d4-44c0-9fbe-e0ba61775643"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.415140 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85xdh\" (UniqueName: \"kubernetes.io/projected/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-kube-api-access-85xdh\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.415176 4723 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7aaa86ba-66d4-44c0-9fbe-e0ba61775643-util\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.415185 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ptkr\" (UniqueName: \"kubernetes.io/projected/6eb9c879-45c4-4082-b6ff-fe25f5a82211-kube-api-access-7ptkr\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.415193 4723 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6eb9c879-45c4-4082-b6ff-fe25f5a82211-util\") on node \"crc\" DevicePath \"\"" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.825906 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.825948 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt" event={"ID":"6eb9c879-45c4-4082-b6ff-fe25f5a82211","Type":"ContainerDied","Data":"8b13ff833f33f159a7578668aa36140ec47be19ea21152c2b8716e1e16173c22"} Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.826011 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b13ff833f33f159a7578668aa36140ec47be19ea21152c2b8716e1e16173c22" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.828726 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.838322 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/e1d84b395ec4fb24465160a6311ce1c8e0acfd42cc4a49faaeadbc39e6mwxh2" event={"ID":"7aaa86ba-66d4-44c0-9fbe-e0ba61775643","Type":"ContainerDied","Data":"b2a4734a16d1d491adb618b14bbeef29bcb8f6eae1a7916dfc19d8681fe61cf3"} Dec 11 15:36:12 crc kubenswrapper[4723]: I1211 15:36:12.838389 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2a4734a16d1d491adb618b14bbeef29bcb8f6eae1a7916dfc19d8681fe61cf3" Dec 11 15:36:13 crc kubenswrapper[4723]: I1211 15:36:13.745419 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:36:13 crc kubenswrapper[4723]: I1211 15:36:13.745480 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:36:14 crc kubenswrapper[4723]: I1211 15:36:14.320215 4723 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="e6c2c683-4358-47d5-b5cd-e97c588b965e" containerName="elasticsearch" probeResult="failure" output=< Dec 11 15:36:14 crc kubenswrapper[4723]: {"timestamp": "2025-12-11T15:36:14+00:00", "message": "readiness probe failed", "curl_rc": "7"} Dec 11 15:36:14 crc kubenswrapper[4723]: > Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.516944 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-76d45b648-v762h"] Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517484 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerName="util" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517497 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerName="util" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517509 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerName="util" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517515 4723 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerName="util" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517522 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="796ed837-5075-4416-bac4-7b920f3857e3" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517530 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="796ed837-5075-4416-bac4-7b920f3857e3" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517540 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="796ed837-5075-4416-bac4-7b920f3857e3" containerName="util" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517545 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="796ed837-5075-4416-bac4-7b920f3857e3" containerName="util" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517558 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerName="pull" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517564 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerName="pull" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517572 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517578 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517586 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517592 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517601 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="796ed837-5075-4416-bac4-7b920f3857e3" containerName="pull" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517606 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="796ed837-5075-4416-bac4-7b920f3857e3" containerName="pull" Dec 11 15:36:19 crc kubenswrapper[4723]: E1211 15:36:19.517616 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerName="pull" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517621 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerName="pull" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517738 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aaa86ba-66d4-44c0-9fbe-e0ba61775643" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517767 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eb9c879-45c4-4082-b6ff-fe25f5a82211" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.517780 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="796ed837-5075-4416-bac4-7b920f3857e3" containerName="extract" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.518197 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.519912 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-jdw8p" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.533504 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-76d45b648-v762h"] Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.561328 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-8jg8v" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.605537 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/567782db-f350-4118-aa1b-c7008a51a4c6-runner\") pod \"smart-gateway-operator-76d45b648-v762h\" (UID: \"567782db-f350-4118-aa1b-c7008a51a4c6\") " pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.605656 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8vzq\" (UniqueName: \"kubernetes.io/projected/567782db-f350-4118-aa1b-c7008a51a4c6-kube-api-access-q8vzq\") pod \"smart-gateway-operator-76d45b648-v762h\" (UID: \"567782db-f350-4118-aa1b-c7008a51a4c6\") " pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.706545 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/567782db-f350-4118-aa1b-c7008a51a4c6-runner\") pod \"smart-gateway-operator-76d45b648-v762h\" (UID: \"567782db-f350-4118-aa1b-c7008a51a4c6\") " pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.706638 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8vzq\" (UniqueName: \"kubernetes.io/projected/567782db-f350-4118-aa1b-c7008a51a4c6-kube-api-access-q8vzq\") pod \"smart-gateway-operator-76d45b648-v762h\" (UID: \"567782db-f350-4118-aa1b-c7008a51a4c6\") " pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.707988 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/567782db-f350-4118-aa1b-c7008a51a4c6-runner\") pod \"smart-gateway-operator-76d45b648-v762h\" (UID: \"567782db-f350-4118-aa1b-c7008a51a4c6\") " pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.734733 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8vzq\" (UniqueName: \"kubernetes.io/projected/567782db-f350-4118-aa1b-c7008a51a4c6-kube-api-access-q8vzq\") pod \"smart-gateway-operator-76d45b648-v762h\" (UID: \"567782db-f350-4118-aa1b-c7008a51a4c6\") " pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.820045 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Dec 11 15:36:19 crc kubenswrapper[4723]: I1211 15:36:19.837444 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" Dec 11 15:36:20 crc kubenswrapper[4723]: I1211 15:36:20.176190 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-76d45b648-v762h"] Dec 11 15:36:20 crc kubenswrapper[4723]: I1211 15:36:20.892754 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" event={"ID":"567782db-f350-4118-aa1b-c7008a51a4c6","Type":"ContainerStarted","Data":"69beb0e809f4a6bceb6763d457b5912512d8e9bc3986609ba7f4745a54a0cb73"} Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.133188 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-74554d9685-hm62n"] Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.134470 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.137360 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-wd5c4" Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.160393 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-74554d9685-hm62n"] Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.245937 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-462x7\" (UniqueName: \"kubernetes.io/projected/c53c8e29-2540-4c3c-834d-365514555685-kube-api-access-462x7\") pod \"service-telemetry-operator-74554d9685-hm62n\" (UID: \"c53c8e29-2540-4c3c-834d-365514555685\") " pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.246034 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/c53c8e29-2540-4c3c-834d-365514555685-runner\") pod \"service-telemetry-operator-74554d9685-hm62n\" (UID: \"c53c8e29-2540-4c3c-834d-365514555685\") " pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.347039 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-462x7\" (UniqueName: \"kubernetes.io/projected/c53c8e29-2540-4c3c-834d-365514555685-kube-api-access-462x7\") pod \"service-telemetry-operator-74554d9685-hm62n\" (UID: \"c53c8e29-2540-4c3c-834d-365514555685\") " pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.347136 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/c53c8e29-2540-4c3c-834d-365514555685-runner\") pod \"service-telemetry-operator-74554d9685-hm62n\" (UID: \"c53c8e29-2540-4c3c-834d-365514555685\") " pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.347594 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/c53c8e29-2540-4c3c-834d-365514555685-runner\") pod \"service-telemetry-operator-74554d9685-hm62n\" (UID: \"c53c8e29-2540-4c3c-834d-365514555685\") " pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:22 crc 
kubenswrapper[4723]: I1211 15:36:22.394657 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-462x7\" (UniqueName: \"kubernetes.io/projected/c53c8e29-2540-4c3c-834d-365514555685-kube-api-access-462x7\") pod \"service-telemetry-operator-74554d9685-hm62n\" (UID: \"c53c8e29-2540-4c3c-834d-365514555685\") " pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:22 crc kubenswrapper[4723]: I1211 15:36:22.476499 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" Dec 11 15:36:23 crc kubenswrapper[4723]: I1211 15:36:23.022311 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-74554d9685-hm62n"] Dec 11 15:36:23 crc kubenswrapper[4723]: W1211 15:36:23.035758 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc53c8e29_2540_4c3c_834d_365514555685.slice/crio-59567d485d723cb5266dd0a0189c63550991acb4f86d1f03683f3d93219db871 WatchSource:0}: Error finding container 59567d485d723cb5266dd0a0189c63550991acb4f86d1f03683f3d93219db871: Status 404 returned error can't find the container with id 59567d485d723cb5266dd0a0189c63550991acb4f86d1f03683f3d93219db871 Dec 11 15:36:23 crc kubenswrapper[4723]: I1211 15:36:23.928333 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" event={"ID":"c53c8e29-2540-4c3c-834d-365514555685","Type":"ContainerStarted","Data":"59567d485d723cb5266dd0a0189c63550991acb4f86d1f03683f3d93219db871"} Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.330007 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-pcngn"] Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.330891 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.334286 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-dscsz" Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.339281 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-pcngn"] Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.482894 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d975t\" (UniqueName: \"kubernetes.io/projected/8997e7f8-505f-4353-af6f-ed7e67fab19c-kube-api-access-d975t\") pod \"interconnect-operator-5bb49f789d-pcngn\" (UID: \"8997e7f8-505f-4353-af6f-ed7e67fab19c\") " pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.583950 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d975t\" (UniqueName: \"kubernetes.io/projected/8997e7f8-505f-4353-af6f-ed7e67fab19c-kube-api-access-d975t\") pod \"interconnect-operator-5bb49f789d-pcngn\" (UID: \"8997e7f8-505f-4353-af6f-ed7e67fab19c\") " pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.617870 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d975t\" (UniqueName: \"kubernetes.io/projected/8997e7f8-505f-4353-af6f-ed7e67fab19c-kube-api-access-d975t\") pod \"interconnect-operator-5bb49f789d-pcngn\" (UID: \"8997e7f8-505f-4353-af6f-ed7e67fab19c\") " pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" Dec 11 15:36:24 crc kubenswrapper[4723]: I1211 15:36:24.655430 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" Dec 11 15:36:43 crc kubenswrapper[4723]: I1211 15:36:43.745507 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:36:43 crc kubenswrapper[4723]: I1211 15:36:43.746056 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:36:43 crc kubenswrapper[4723]: I1211 15:36:43.746114 4723 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:36:43 crc kubenswrapper[4723]: I1211 15:36:43.746681 4723 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ee76f68a1077eaff7e44d5582a42e9fd3b36da3d7d13741712c5ce62619f8d4e"} pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 15:36:43 crc kubenswrapper[4723]: I1211 15:36:43.746733 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" containerID="cri-o://ee76f68a1077eaff7e44d5582a42e9fd3b36da3d7d13741712c5ce62619f8d4e" gracePeriod=600 Dec 11 15:36:44 crc kubenswrapper[4723]: I1211 15:36:44.075359 4723 generic.go:334] "Generic (PLEG): container finished" podID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerID="ee76f68a1077eaff7e44d5582a42e9fd3b36da3d7d13741712c5ce62619f8d4e" exitCode=0 Dec 11 15:36:44 crc kubenswrapper[4723]: I1211 15:36:44.075399 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerDied","Data":"ee76f68a1077eaff7e44d5582a42e9fd3b36da3d7d13741712c5ce62619f8d4e"} Dec 11 15:36:44 crc kubenswrapper[4723]: I1211 15:36:44.075429 4723 scope.go:117] "RemoveContainer" containerID="5fa3fe424c4893f1bc2976d405633dbdfbe341b0a6fe25c362bac639bab753ca" Dec 11 15:36:44 crc kubenswrapper[4723]: I1211 15:36:44.414857 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-pcngn"] Dec 11 15:36:44 crc kubenswrapper[4723]: E1211 15:36:44.457570 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/infrawatch/service-telemetry-operator:latest" Dec 11 15:36:44 crc kubenswrapper[4723]: E1211 15:36:44.457708 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/infrawatch/service-telemetry-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:service-telemetry-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_WEBHOOK_SNMP_IMAGE,Value:quay.io/infrawatch/prometheus-webhook-snmp:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_IMAGE,Value:quay.io/prometheus/prometheus:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER_IMAGE,Value:quay.io/prometheus/alertmanager:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:service-telemetry-operator.v1.5.1765406856,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-462x7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod service-telemetry-operator-74554d9685-hm62n_service-telemetry(c53c8e29-2540-4c3c-834d-365514555685): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 11 15:36:44 crc kubenswrapper[4723]: E1211 15:36:44.458948 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" podUID="c53c8e29-2540-4c3c-834d-365514555685" Dec 11 15:36:45 crc kubenswrapper[4723]: I1211 15:36:45.083729 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" event={"ID":"8997e7f8-505f-4353-af6f-ed7e67fab19c","Type":"ContainerStarted","Data":"36888266b17aae834b05c3332037ea5d9b759004c83b2df32e30fd238b38a618"} Dec 11 15:36:45 crc kubenswrapper[4723]: E1211 15:36:45.086360 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/service-telemetry-operator:latest\\\"\"" 
pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" podUID="c53c8e29-2540-4c3c-834d-365514555685" Dec 11 15:36:49 crc kubenswrapper[4723]: E1211 15:36:49.611007 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:latest" Dec 11 15:36:49 crc kubenswrapper[4723]: E1211 15:36:49.611487 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1765406851,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q8vzq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-76d45b648-v762h_service-telemetry(567782db-f350-4118-aa1b-c7008a51a4c6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 15:36:49 crc kubenswrapper[4723]: E1211 15:36:49.612629 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" podUID="567782db-f350-4118-aa1b-c7008a51a4c6" Dec 11 15:36:50 crc kubenswrapper[4723]: I1211 15:36:50.136520 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"8b706cc1ae9efdd0fd2da504cb2d8b7ce339d544b3cc83f32508f322f998dd8e"} Dec 11 15:36:50 crc kubenswrapper[4723]: E1211 15:36:50.138921 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:latest\\\"\"" pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" podUID="567782db-f350-4118-aa1b-c7008a51a4c6" Dec 11 15:36:55 crc kubenswrapper[4723]: I1211 15:36:55.750634 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" event={"ID":"8997e7f8-505f-4353-af6f-ed7e67fab19c","Type":"ContainerStarted","Data":"45eb8153a865b860559e62b4cba628f3986e3edb8d3f32953eb05605bd567bf6"} Dec 11 15:36:55 crc kubenswrapper[4723]: I1211 15:36:55.767056 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-pcngn" podStartSLOduration=21.141234692 podStartE2EDuration="31.767041521s" podCreationTimestamp="2025-12-11 15:36:24 +0000 UTC" firstStartedPulling="2025-12-11 15:36:44.429351491 +0000 UTC m=+815.203584936" lastFinishedPulling="2025-12-11 15:36:55.05515833 +0000 UTC m=+825.829391765" observedRunningTime="2025-12-11 15:36:55.765039897 +0000 UTC m=+826.539273332" watchObservedRunningTime="2025-12-11 15:36:55.767041521 +0000 UTC m=+826.541274956" Dec 11 15:37:02 crc kubenswrapper[4723]: I1211 15:37:02.787981 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" event={"ID":"567782db-f350-4118-aa1b-c7008a51a4c6","Type":"ContainerStarted","Data":"e8ea123b8ebe4be09bda1f6850e755439ca1a3a050b5729a25c3ef1167f4168e"} Dec 11 15:37:02 crc kubenswrapper[4723]: I1211 15:37:02.806326 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-76d45b648-v762h" podStartSLOduration=1.397488817 podStartE2EDuration="43.806305085s" podCreationTimestamp="2025-12-11 15:36:19 +0000 UTC" firstStartedPulling="2025-12-11 15:36:20.191026548 +0000 UTC m=+790.965259983" lastFinishedPulling="2025-12-11 15:37:02.599842816 +0000 UTC m=+833.374076251" observedRunningTime="2025-12-11 15:37:02.805659457 +0000 UTC m=+833.579892902" watchObservedRunningTime="2025-12-11 15:37:02.806305085 +0000 UTC m=+833.580538530" Dec 11 15:37:09 crc kubenswrapper[4723]: I1211 15:37:09.850412 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" event={"ID":"c53c8e29-2540-4c3c-834d-365514555685","Type":"ContainerStarted","Data":"b598a3f05fb567335e3fad515de4a7a2c4a4ecdb4ba73885fe62373d6eee8af2"} Dec 11 15:37:09 crc kubenswrapper[4723]: I1211 15:37:09.867009 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-74554d9685-hm62n" podStartSLOduration=1.267202151 podStartE2EDuration="47.866990061s" podCreationTimestamp="2025-12-11 15:36:22 +0000 UTC" firstStartedPulling="2025-12-11 15:36:23.03783619 +0000 UTC m=+793.812069625" lastFinishedPulling="2025-12-11 15:37:09.6376241 +0000 UTC m=+840.411857535" observedRunningTime="2025-12-11 15:37:09.865907952 +0000 UTC m=+840.640141387" watchObservedRunningTime="2025-12-11 15:37:09.866990061 +0000 UTC m=+840.641223496" Dec 11 15:37:24 
crc kubenswrapper[4723]: I1211 15:37:24.436633 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n7b46"] Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.438598 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.447523 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n7b46"] Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.542359 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-catalog-content\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.542408 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-utilities\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.542428 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvvx6\" (UniqueName: \"kubernetes.io/projected/e30926e2-c841-474f-9478-f04288f0baaf-kube-api-access-wvvx6\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.643719 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-catalog-content\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.643767 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-utilities\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.643786 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvvx6\" (UniqueName: \"kubernetes.io/projected/e30926e2-c841-474f-9478-f04288f0baaf-kube-api-access-wvvx6\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.644398 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-catalog-content\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.644526 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-utilities\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.667017 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvvx6\" (UniqueName: \"kubernetes.io/projected/e30926e2-c841-474f-9478-f04288f0baaf-kube-api-access-wvvx6\") pod \"community-operators-n7b46\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.759045 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:24 crc kubenswrapper[4723]: I1211 15:37:24.992131 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n7b46"] Dec 11 15:37:25 crc kubenswrapper[4723]: I1211 15:37:25.950283 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerStarted","Data":"38402315ff38d3fa478cb0cbb26cc8e62d257de1ccc88bf36af955c95fe13329"} Dec 11 15:37:27 crc kubenswrapper[4723]: I1211 15:37:27.963731 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerStarted","Data":"e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb"} Dec 11 15:37:28 crc kubenswrapper[4723]: I1211 15:37:28.970399 4723 generic.go:334] "Generic (PLEG): container finished" podID="e30926e2-c841-474f-9478-f04288f0baaf" containerID="e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb" exitCode=0 Dec 11 15:37:28 crc kubenswrapper[4723]: I1211 15:37:28.970456 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerDied","Data":"e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb"} Dec 11 15:37:30 crc kubenswrapper[4723]: E1211 15:37:30.598818 4723 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.051s" Dec 11 15:37:41 crc kubenswrapper[4723]: I1211 15:37:41.501729 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerStarted","Data":"a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51"} Dec 11 15:37:42 crc kubenswrapper[4723]: I1211 15:37:42.510420 4723 generic.go:334] "Generic (PLEG): container finished" podID="e30926e2-c841-474f-9478-f04288f0baaf" containerID="a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51" exitCode=0 Dec 11 15:37:42 crc kubenswrapper[4723]: I1211 15:37:42.510469 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerDied","Data":"a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51"} Dec 11 15:37:44 crc kubenswrapper[4723]: I1211 15:37:44.525215 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" 
event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerStarted","Data":"f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5"} Dec 11 15:37:44 crc kubenswrapper[4723]: I1211 15:37:44.550711 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n7b46" podStartSLOduration=5.922719366 podStartE2EDuration="20.549942331s" podCreationTimestamp="2025-12-11 15:37:24 +0000 UTC" firstStartedPulling="2025-12-11 15:37:28.972012171 +0000 UTC m=+859.746245606" lastFinishedPulling="2025-12-11 15:37:43.599235136 +0000 UTC m=+874.373468571" observedRunningTime="2025-12-11 15:37:44.541395653 +0000 UTC m=+875.315629088" watchObservedRunningTime="2025-12-11 15:37:44.549942331 +0000 UTC m=+875.324175766" Dec 11 15:37:44 crc kubenswrapper[4723]: I1211 15:37:44.759244 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:44 crc kubenswrapper[4723]: I1211 15:37:44.759303 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:45 crc kubenswrapper[4723]: I1211 15:37:45.812268 4723 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-n7b46" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="registry-server" probeResult="failure" output=< Dec 11 15:37:45 crc kubenswrapper[4723]: timeout: failed to connect service ":50051" within 1s Dec 11 15:37:45 crc kubenswrapper[4723]: > Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.192061 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6md6t"] Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.193230 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: W1211 15:37:46.195138 4723 reflector.go:561] object-"service-telemetry"/"default-interconnect-openstack-credentials": failed to list *v1.Secret: secrets "default-interconnect-openstack-credentials" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "service-telemetry": no relationship found between node 'crc' and this object Dec 11 15:37:46 crc kubenswrapper[4723]: E1211 15:37:46.195185 4723 reflector.go:158] "Unhandled Error" err="object-\"service-telemetry\"/\"default-interconnect-openstack-credentials\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-interconnect-openstack-credentials\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"service-telemetry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 15:37:46 crc kubenswrapper[4723]: W1211 15:37:46.195452 4723 reflector.go:561] object-"service-telemetry"/"default-interconnect-inter-router-credentials": failed to list *v1.Secret: secrets "default-interconnect-inter-router-credentials" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "service-telemetry": no relationship found between node 'crc' and this object Dec 11 15:37:46 crc kubenswrapper[4723]: E1211 15:37:46.195487 4723 reflector.go:158] "Unhandled Error" err="object-\"service-telemetry\"/\"default-interconnect-inter-router-credentials\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-interconnect-inter-router-credentials\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"service-telemetry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.196138 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.196235 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.196298 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.196425 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-2g4np" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.197577 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.208021 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6md6t"] Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.367677 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsgbm\" (UniqueName: \"kubernetes.io/projected/6c0558d6-3251-42b5-a202-12e388f9fdf0-kube-api-access-bsgbm\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: 
I1211 15:37:46.367785 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-users\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.367819 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.367849 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.367940 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.368009 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-config\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.368119 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.469712 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-users\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.470062 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " 
pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.470094 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.470123 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.470153 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-config\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.470202 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.470250 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsgbm\" (UniqueName: \"kubernetes.io/projected/6c0558d6-3251-42b5-a202-12e388f9fdf0-kube-api-access-bsgbm\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.471124 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-config\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.476312 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.476759 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-users\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc 
kubenswrapper[4723]: I1211 15:37:46.483754 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:46 crc kubenswrapper[4723]: I1211 15:37:46.487431 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsgbm\" (UniqueName: \"kubernetes.io/projected/6c0558d6-3251-42b5-a202-12e388f9fdf0-kube-api-access-bsgbm\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:47 crc kubenswrapper[4723]: I1211 15:37:47.032432 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Dec 11 15:37:47 crc kubenswrapper[4723]: I1211 15:37:47.045155 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:47 crc kubenswrapper[4723]: I1211 15:37:47.227580 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Dec 11 15:37:47 crc kubenswrapper[4723]: I1211 15:37:47.234099 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-6md6t\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:47 crc kubenswrapper[4723]: I1211 15:37:47.408778 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:37:47 crc kubenswrapper[4723]: I1211 15:37:47.841007 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6md6t"] Dec 11 15:37:47 crc kubenswrapper[4723]: W1211 15:37:47.842348 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c0558d6_3251_42b5_a202_12e388f9fdf0.slice/crio-d969d5effff59d0034ef95580a59cadef3956b4f61ae2f23e74931278c08a426 WatchSource:0}: Error finding container d969d5effff59d0034ef95580a59cadef3956b4f61ae2f23e74931278c08a426: Status 404 returned error can't find the container with id d969d5effff59d0034ef95580a59cadef3956b4f61ae2f23e74931278c08a426 Dec 11 15:37:48 crc kubenswrapper[4723]: I1211 15:37:48.561466 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" event={"ID":"6c0558d6-3251-42b5-a202-12e388f9fdf0","Type":"ContainerStarted","Data":"d969d5effff59d0034ef95580a59cadef3956b4f61ae2f23e74931278c08a426"} Dec 11 15:37:54 crc kubenswrapper[4723]: I1211 15:37:54.835790 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:54 crc kubenswrapper[4723]: I1211 15:37:54.899334 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:55 crc kubenswrapper[4723]: I1211 15:37:55.066682 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n7b46"] Dec 11 15:37:56 crc kubenswrapper[4723]: I1211 15:37:56.633508 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n7b46" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="registry-server" containerID="cri-o://f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5" gracePeriod=2 Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.296478 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.325683 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvvx6\" (UniqueName: \"kubernetes.io/projected/e30926e2-c841-474f-9478-f04288f0baaf-kube-api-access-wvvx6\") pod \"e30926e2-c841-474f-9478-f04288f0baaf\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.325778 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-utilities\") pod \"e30926e2-c841-474f-9478-f04288f0baaf\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.325852 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-catalog-content\") pod \"e30926e2-c841-474f-9478-f04288f0baaf\" (UID: \"e30926e2-c841-474f-9478-f04288f0baaf\") " Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.327080 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-utilities" (OuterVolumeSpecName: "utilities") pod "e30926e2-c841-474f-9478-f04288f0baaf" (UID: "e30926e2-c841-474f-9478-f04288f0baaf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.332923 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e30926e2-c841-474f-9478-f04288f0baaf-kube-api-access-wvvx6" (OuterVolumeSpecName: "kube-api-access-wvvx6") pod "e30926e2-c841-474f-9478-f04288f0baaf" (UID: "e30926e2-c841-474f-9478-f04288f0baaf"). InnerVolumeSpecName "kube-api-access-wvvx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.375262 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e30926e2-c841-474f-9478-f04288f0baaf" (UID: "e30926e2-c841-474f-9478-f04288f0baaf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.427713 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvvx6\" (UniqueName: \"kubernetes.io/projected/e30926e2-c841-474f-9478-f04288f0baaf-kube-api-access-wvvx6\") on node \"crc\" DevicePath \"\"" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.428034 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.428143 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30926e2-c841-474f-9478-f04288f0baaf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.642443 4723 generic.go:334] "Generic (PLEG): container finished" podID="e30926e2-c841-474f-9478-f04288f0baaf" containerID="f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5" exitCode=0 Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.642477 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerDied","Data":"f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5"} Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.642527 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n7b46" event={"ID":"e30926e2-c841-474f-9478-f04288f0baaf","Type":"ContainerDied","Data":"38402315ff38d3fa478cb0cbb26cc8e62d257de1ccc88bf36af955c95fe13329"} Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.642546 4723 scope.go:117] "RemoveContainer" containerID="f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.643662 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n7b46" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.659263 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n7b46"] Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.662929 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n7b46"] Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.665537 4723 scope.go:117] "RemoveContainer" containerID="a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.684153 4723 scope.go:117] "RemoveContainer" containerID="e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.700181 4723 scope.go:117] "RemoveContainer" containerID="f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5" Dec 11 15:37:57 crc kubenswrapper[4723]: E1211 15:37:57.700591 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5\": container with ID starting with f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5 not found: ID does not exist" containerID="f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.700627 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5"} err="failed to get container status \"f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5\": rpc error: code = NotFound desc = could not find container \"f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5\": container with ID starting with f46ef792ba0f5c490e2b61da8b655f8bdb8fac867510f7aadf09daa96d7f8fd5 not found: ID does not exist" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.700646 4723 scope.go:117] "RemoveContainer" containerID="a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51" Dec 11 15:37:57 crc kubenswrapper[4723]: E1211 15:37:57.700882 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51\": container with ID starting with a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51 not found: ID does not exist" containerID="a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.700909 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51"} err="failed to get container status \"a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51\": rpc error: code = NotFound desc = could not find container \"a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51\": container with ID starting with a740346c166a776d57cd16b2b36ae15ccee34cc1ebc3f8ccbf7672d853eb3b51 not found: ID does not exist" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.700924 4723 scope.go:117] "RemoveContainer" containerID="e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb" Dec 11 15:37:57 crc kubenswrapper[4723]: E1211 15:37:57.701155 4723 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb\": container with ID starting with e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb not found: ID does not exist" containerID="e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb" Dec 11 15:37:57 crc kubenswrapper[4723]: I1211 15:37:57.701188 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb"} err="failed to get container status \"e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb\": rpc error: code = NotFound desc = could not find container \"e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb\": container with ID starting with e208c5fdda49bc74e4de0dcd2ad634ed3c4630b54104404166b87d41179beefb not found: ID does not exist" Dec 11 15:37:58 crc kubenswrapper[4723]: I1211 15:37:58.650723 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" event={"ID":"6c0558d6-3251-42b5-a202-12e388f9fdf0","Type":"ContainerStarted","Data":"e9f740c11adb8fc0be265033eeb490f6a7abd542aaee9f9aa6cb957087f66232"} Dec 11 15:37:59 crc kubenswrapper[4723]: I1211 15:37:59.554839 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e30926e2-c841-474f-9478-f04288f0baaf" path="/var/lib/kubelet/pods/e30926e2-c841-474f-9478-f04288f0baaf/volumes" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.245629 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" podStartSLOduration=4.828025487 podStartE2EDuration="14.245586472s" podCreationTimestamp="2025-12-11 15:37:46 +0000 UTC" firstStartedPulling="2025-12-11 15:37:47.844613564 +0000 UTC m=+878.618846989" lastFinishedPulling="2025-12-11 15:37:57.262174539 +0000 UTC m=+888.036407974" observedRunningTime="2025-12-11 15:37:58.67288963 +0000 UTC m=+889.447123065" watchObservedRunningTime="2025-12-11 15:38:00.245586472 +0000 UTC m=+891.019819907" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.248774 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 11 15:38:00 crc kubenswrapper[4723]: E1211 15:38:00.249042 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="extract-utilities" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.249062 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="extract-utilities" Dec 11 15:38:00 crc kubenswrapper[4723]: E1211 15:38:00.249081 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="registry-server" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.249090 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="registry-server" Dec 11 15:38:00 crc kubenswrapper[4723]: E1211 15:38:00.249110 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="extract-content" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.249117 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="extract-content" Dec 11 15:38:00 crc 
kubenswrapper[4723]: I1211 15:38:00.249245 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="e30926e2-c841-474f-9478-f04288f0baaf" containerName="registry-server" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.250491 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.253471 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.253872 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.254041 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.254846 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-g7v9d" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.254849 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.255526 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.255627 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.255841 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.265197 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361284 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-config-out\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361336 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-config\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361360 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361383 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhpwd\" (UniqueName: \"kubernetes.io/projected/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-kube-api-access-rhpwd\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " 
pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361430 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361458 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361481 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-tls-assets\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361514 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361534 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-web-config\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.361556 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473549 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473674 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473703 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" 
(UniqueName: \"kubernetes.io/projected/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-tls-assets\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473753 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473781 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-web-config\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473818 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473843 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-config-out\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473863 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-config\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473885 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.473912 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhpwd\" (UniqueName: \"kubernetes.io/projected/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-kube-api-access-rhpwd\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: E1211 15:38:00.474077 4723 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Dec 11 15:38:00 crc kubenswrapper[4723]: E1211 15:38:00.474174 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls podName:917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123 nodeName:}" failed. 
No retries permitted until 2025-12-11 15:38:00.974152941 +0000 UTC m=+891.748386366 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123") : secret "default-prometheus-proxy-tls" not found Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.474758 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.475650 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.478108 4723 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.478159 4723 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a0c0966b25af36c068f4a010d0901d6a4b49b119c45e15389f88354d38dd5df8/globalmount\"" pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.480387 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-config-out\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.480628 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-config\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.483092 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-tls-assets\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.484244 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc 
kubenswrapper[4723]: I1211 15:38:00.484524 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-web-config\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.492514 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhpwd\" (UniqueName: \"kubernetes.io/projected/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-kube-api-access-rhpwd\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.503421 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b111b80c-75a4-44d5-a9f4-a3ab0d3afa1e\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: I1211 15:38:00.981292 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:00 crc kubenswrapper[4723]: E1211 15:38:00.981489 4723 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Dec 11 15:38:00 crc kubenswrapper[4723]: E1211 15:38:00.981895 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls podName:917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123 nodeName:}" failed. No retries permitted until 2025-12-11 15:38:01.981850963 +0000 UTC m=+892.756084398 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123") : secret "default-prometheus-proxy-tls" not found Dec 11 15:38:01 crc kubenswrapper[4723]: I1211 15:38:01.997285 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:02 crc kubenswrapper[4723]: I1211 15:38:02.011340 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123\") " pod="service-telemetry/prometheus-default-0" Dec 11 15:38:02 crc kubenswrapper[4723]: I1211 15:38:02.070363 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Dec 11 15:38:02 crc kubenswrapper[4723]: I1211 15:38:02.291835 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Dec 11 15:38:02 crc kubenswrapper[4723]: W1211 15:38:02.299810 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod917c8eb2_66c9_4b10_b8f5_1c3b6e3c7123.slice/crio-09bf2234764d05f4f29a28a718581ba2f8270c69e75b5ba16e7c8c63cbbf6232 WatchSource:0}: Error finding container 09bf2234764d05f4f29a28a718581ba2f8270c69e75b5ba16e7c8c63cbbf6232: Status 404 returned error can't find the container with id 09bf2234764d05f4f29a28a718581ba2f8270c69e75b5ba16e7c8c63cbbf6232 Dec 11 15:38:02 crc kubenswrapper[4723]: I1211 15:38:02.677597 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123","Type":"ContainerStarted","Data":"09bf2234764d05f4f29a28a718581ba2f8270c69e75b5ba16e7c8c63cbbf6232"} Dec 11 15:38:06 crc kubenswrapper[4723]: I1211 15:38:06.711731 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123","Type":"ContainerStarted","Data":"0a551b3ae2476d53a08477d5c0ba4cc2e389eddef4aedc07bd8b8e88b801ebcf"} Dec 11 15:38:12 crc kubenswrapper[4723]: I1211 15:38:12.419069 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq"] Dec 11 15:38:12 crc kubenswrapper[4723]: I1211 15:38:12.420317 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" Dec 11 15:38:12 crc kubenswrapper[4723]: I1211 15:38:12.427090 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq"] Dec 11 15:38:12 crc kubenswrapper[4723]: I1211 15:38:12.554708 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbnkq\" (UniqueName: \"kubernetes.io/projected/d86b58d7-d8ba-4d6b-8b06-3013e693f293-kube-api-access-wbnkq\") pod \"default-snmp-webhook-78bcbbdcff-pvfdq\" (UID: \"d86b58d7-d8ba-4d6b-8b06-3013e693f293\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" Dec 11 15:38:12 crc kubenswrapper[4723]: I1211 15:38:12.656474 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbnkq\" (UniqueName: \"kubernetes.io/projected/d86b58d7-d8ba-4d6b-8b06-3013e693f293-kube-api-access-wbnkq\") pod \"default-snmp-webhook-78bcbbdcff-pvfdq\" (UID: \"d86b58d7-d8ba-4d6b-8b06-3013e693f293\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" Dec 11 15:38:12 crc kubenswrapper[4723]: I1211 15:38:12.676033 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbnkq\" (UniqueName: \"kubernetes.io/projected/d86b58d7-d8ba-4d6b-8b06-3013e693f293-kube-api-access-wbnkq\") pod \"default-snmp-webhook-78bcbbdcff-pvfdq\" (UID: \"d86b58d7-d8ba-4d6b-8b06-3013e693f293\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" Dec 11 15:38:12 crc kubenswrapper[4723]: I1211 15:38:12.736234 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" Dec 11 15:38:13 crc kubenswrapper[4723]: I1211 15:38:13.131952 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq"] Dec 11 15:38:13 crc kubenswrapper[4723]: W1211 15:38:13.139125 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd86b58d7_d8ba_4d6b_8b06_3013e693f293.slice/crio-1087f1d63a7be10e7d7407554b37178075993c26cecbae032a45b2653347710a WatchSource:0}: Error finding container 1087f1d63a7be10e7d7407554b37178075993c26cecbae032a45b2653347710a: Status 404 returned error can't find the container with id 1087f1d63a7be10e7d7407554b37178075993c26cecbae032a45b2653347710a Dec 11 15:38:13 crc kubenswrapper[4723]: I1211 15:38:13.770258 4723 generic.go:334] "Generic (PLEG): container finished" podID="917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123" containerID="0a551b3ae2476d53a08477d5c0ba4cc2e389eddef4aedc07bd8b8e88b801ebcf" exitCode=0 Dec 11 15:38:13 crc kubenswrapper[4723]: I1211 15:38:13.770366 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123","Type":"ContainerDied","Data":"0a551b3ae2476d53a08477d5c0ba4cc2e389eddef4aedc07bd8b8e88b801ebcf"} Dec 11 15:38:13 crc kubenswrapper[4723]: I1211 15:38:13.772798 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" event={"ID":"d86b58d7-d8ba-4d6b-8b06-3013e693f293","Type":"ContainerStarted","Data":"1087f1d63a7be10e7d7407554b37178075993c26cecbae032a45b2653347710a"} Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.865491 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.867641 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.871018 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-cluster-tls-config" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.871173 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-pc8mv" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.871435 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.871487 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.871190 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.871236 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.890819 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956665 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7rwr\" (UniqueName: \"kubernetes.io/projected/667fd4db-55d2-4e83-8f5b-73ffd8051429-kube-api-access-t7rwr\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956724 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/667fd4db-55d2-4e83-8f5b-73ffd8051429-config-out\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956773 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956811 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0debebf2-456a-4e71-96f4-08608290e9e9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0debebf2-456a-4e71-96f4-08608290e9e9\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956834 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956884 4723 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/667fd4db-55d2-4e83-8f5b-73ffd8051429-tls-assets\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956914 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-web-config\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956943 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:19 crc kubenswrapper[4723]: I1211 15:38:19.956981 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-config-volume\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058023 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7rwr\" (UniqueName: \"kubernetes.io/projected/667fd4db-55d2-4e83-8f5b-73ffd8051429-kube-api-access-t7rwr\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058067 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/667fd4db-55d2-4e83-8f5b-73ffd8051429-config-out\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058126 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058158 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0debebf2-456a-4e71-96f4-08608290e9e9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0debebf2-456a-4e71-96f4-08608290e9e9\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058174 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " 
pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058207 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/667fd4db-55d2-4e83-8f5b-73ffd8051429-tls-assets\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058230 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-web-config\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058257 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.058281 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-config-volume\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.063954 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-web-config\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.064172 4723 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.064207 4723 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0debebf2-456a-4e71-96f4-08608290e9e9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0debebf2-456a-4e71-96f4-08608290e9e9\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0d52db13663c3610b7696553a087e36ceb213c2734436ae03ddc444ae383bf41/globalmount\"" pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.065374 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.065518 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-config-volume\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.065895 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.066707 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/667fd4db-55d2-4e83-8f5b-73ffd8051429-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.066924 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/667fd4db-55d2-4e83-8f5b-73ffd8051429-config-out\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.067098 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/667fd4db-55d2-4e83-8f5b-73ffd8051429-tls-assets\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.084904 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7rwr\" (UniqueName: \"kubernetes.io/projected/667fd4db-55d2-4e83-8f5b-73ffd8051429-kube-api-access-t7rwr\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.100107 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0debebf2-456a-4e71-96f4-08608290e9e9\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0debebf2-456a-4e71-96f4-08608290e9e9\") pod \"alertmanager-default-0\" (UID: \"667fd4db-55d2-4e83-8f5b-73ffd8051429\") " pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:20 crc kubenswrapper[4723]: I1211 15:38:20.188369 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/alertmanager-default-0" Dec 11 15:38:28 crc kubenswrapper[4723]: E1211 15:38:28.213171 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/prometheus-webhook-snmp:latest" Dec 11 15:38:28 crc kubenswrapper[4723]: E1211 15:38:28.213682 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-webhook-snmp,Image:quay.io/infrawatch/prometheus-webhook-snmp:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:,HostPort:0,ContainerPort:9099,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:SNMP_COMMUNITY,Value:public,ValueFrom:nil,},EnvVar{Name:SNMP_RETRIES,Value:5,ValueFrom:nil,},EnvVar{Name:SNMP_HOST,Value:192.168.24.254,ValueFrom:nil,},EnvVar{Name:SNMP_PORT,Value:162,ValueFrom:nil,},EnvVar{Name:SNMP_TIMEOUT,Value:1,ValueFrom:nil,},EnvVar{Name:ALERT_OID_LABEL,Value:oid,ValueFrom:nil,},EnvVar{Name:TRAP_OID_PREFIX,Value:1.3.6.1.4.1.50495.15,ValueFrom:nil,},EnvVar{Name:TRAP_DEFAULT_OID,Value:1.3.6.1.4.1.50495.15.1.2.1,ValueFrom:nil,},EnvVar{Name:TRAP_DEFAULT_SEVERITY,Value:,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wbnkq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-snmp-webhook-78bcbbdcff-pvfdq_service-telemetry(d86b58d7-d8ba-4d6b-8b06-3013e693f293): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 15:38:28 crc kubenswrapper[4723]: E1211 15:38:28.214926 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-webhook-snmp\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" podUID="d86b58d7-d8ba-4d6b-8b06-3013e693f293" Dec 11 15:38:28 crc kubenswrapper[4723]: E1211 15:38:28.969192 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-webhook-snmp\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/prometheus-webhook-snmp:latest\\\"\"" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" podUID="d86b58d7-d8ba-4d6b-8b06-3013e693f293" Dec 11 15:38:31 crc kubenswrapper[4723]: I1211 15:38:31.535416 4723 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Dec 11 15:38:33 crc kubenswrapper[4723]: I1211 15:38:33.994429 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"667fd4db-55d2-4e83-8f5b-73ffd8051429","Type":"ContainerStarted","Data":"668c3585a73b852f1201ae6038761607ca1f34684008fdeef9479fef16d40f57"} Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.611624 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9"] Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.613295 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.615209 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.615454 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.615658 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.617639 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-2xlwp" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.628192 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9"] Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.797618 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvx2x\" (UniqueName: \"kubernetes.io/projected/5ef35e91-5f1f-4463-aded-49de8810267a-kube-api-access-wvx2x\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.797665 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.798181 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ef35e91-5f1f-4463-aded-49de8810267a-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.798234 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls\") pod 
\"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.798371 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ef35e91-5f1f-4463-aded-49de8810267a-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.899518 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ef35e91-5f1f-4463-aded-49de8810267a-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.899580 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvx2x\" (UniqueName: \"kubernetes.io/projected/5ef35e91-5f1f-4463-aded-49de8810267a-kube-api-access-wvx2x\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.899602 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.899622 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ef35e91-5f1f-4463-aded-49de8810267a-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.899659 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: E1211 15:38:36.899793 4723 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Dec 11 15:38:36 crc kubenswrapper[4723]: E1211 15:38:36.899850 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls podName:5ef35e91-5f1f-4463-aded-49de8810267a nodeName:}" failed. 
No retries permitted until 2025-12-11 15:38:37.399832344 +0000 UTC m=+928.174065779 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" (UID: "5ef35e91-5f1f-4463-aded-49de8810267a") : secret "default-cloud1-coll-meter-proxy-tls" not found Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.900274 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ef35e91-5f1f-4463-aded-49de8810267a-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.900627 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ef35e91-5f1f-4463-aded-49de8810267a-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.911711 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:36 crc kubenswrapper[4723]: I1211 15:38:36.922699 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvx2x\" (UniqueName: \"kubernetes.io/projected/5ef35e91-5f1f-4463-aded-49de8810267a-kube-api-access-wvx2x\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:37 crc kubenswrapper[4723]: I1211 15:38:37.406989 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:37 crc kubenswrapper[4723]: E1211 15:38:37.407134 4723 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Dec 11 15:38:37 crc kubenswrapper[4723]: E1211 15:38:37.408094 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls podName:5ef35e91-5f1f-4463-aded-49de8810267a nodeName:}" failed. No retries permitted until 2025-12-11 15:38:38.40806912 +0000 UTC m=+929.182302555 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" (UID: "5ef35e91-5f1f-4463-aded-49de8810267a") : secret "default-cloud1-coll-meter-proxy-tls" not found Dec 11 15:38:38 crc kubenswrapper[4723]: I1211 15:38:38.418464 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:38 crc kubenswrapper[4723]: I1211 15:38:38.434602 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5ef35e91-5f1f-4463-aded-49de8810267a-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9\" (UID: \"5ef35e91-5f1f-4463-aded-49de8810267a\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:38 crc kubenswrapper[4723]: I1211 15:38:38.436694 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" Dec 11 15:38:38 crc kubenswrapper[4723]: E1211 15:38:38.458268 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="quay.io/prometheus/prometheus:latest" Dec 11 15:38:38 crc kubenswrapper[4723]: E1211 15:38:38.458510 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus,Image:quay.io/prometheus/prometheus:latest,Command:[],Args:[--config.file=/etc/prometheus/config_out/prometheus.env.yaml --web.enable-lifecycle --web.route-prefix=/ --web.listen-address=127.0.0.1:9090 --storage.tsdb.retention.time=24h --storage.tsdb.path=/prometheus 
--web.config.file=/etc/prometheus/web_config/web-config.yaml],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-out,ReadOnly:true,MountPath:/etc/prometheus/config_out,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tls-assets,ReadOnly:true,MountPath:/etc/prometheus/certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-default-db,ReadOnly:false,MountPath:/prometheus,SubPath:prometheus-db,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secret-default-prometheus-proxy-tls,ReadOnly:true,MountPath:/etc/prometheus/secrets/default-prometheus-proxy-tls,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secret-default-session-secret,ReadOnly:true,MountPath:/etc/prometheus/secrets/default-session-secret,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:configmap-serving-certs-ca-bundle,ReadOnly:true,MountPath:/etc/prometheus/configmaps/serving-certs-ca-bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-default-rulefiles-0,ReadOnly:false,MountPath:/etc/prometheus/rules/prometheus-default-rulefiles-0,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:web-config,ReadOnly:true,MountPath:/etc/prometheus/web_config/web-config.yaml,SubPath:web-config.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rhpwd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[sh -c if [ -x \"$(command -v curl)\" ]; then exec curl --fail http://localhost:9090/-/healthy; elif [ -x \"$(command -v wget)\" ]; then exec wget -q -O /dev/null http://localhost:9090/-/healthy; else exit 1; fi],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:6,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[sh -c if [ -x \"$(command -v curl)\" ]; then exec curl --fail http://localhost:9090/-/ready; elif [ -x \"$(command -v wget)\" ]; then exec wget -q -O /dev/null http://localhost:9090/-/ready; else exit 1; fi],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[sh -c if [ -x \"$(command -v curl)\" ]; then exec curl --fail http://localhost:9090/-/ready; elif [ -x \"$(command -v wget)\" ]; then exec wget -q -O /dev/null http://localhost:9090/-/ready; else exit 1; 
fi],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:15,SuccessThreshold:1,FailureThreshold:60,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod prometheus-default-0_service-telemetry(917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:38:38 crc kubenswrapper[4723]: I1211 15:38:38.936401 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9"] Dec 11 15:38:38 crc kubenswrapper[4723]: W1211 15:38:38.944679 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ef35e91_5f1f_4463_aded_49de8810267a.slice/crio-9ae11df8d65e48cde216c505fd83142680666e50abec08511b7edccbf2874b87 WatchSource:0}: Error finding container 9ae11df8d65e48cde216c505fd83142680666e50abec08511b7edccbf2874b87: Status 404 returned error can't find the container with id 9ae11df8d65e48cde216c505fd83142680666e50abec08511b7edccbf2874b87 Dec 11 15:38:39 crc kubenswrapper[4723]: I1211 15:38:39.038654 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" event={"ID":"5ef35e91-5f1f-4463-aded-49de8810267a","Type":"ContainerStarted","Data":"9ae11df8d65e48cde216c505fd83142680666e50abec08511b7edccbf2874b87"} Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.627512 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4"] Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.630560 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.632994 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.641274 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4"] Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.644937 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.650450 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.650545 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.650573 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/be4db1ae-f8d8-4739-ac2c-148857809916-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.650606 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nc7h\" (UniqueName: \"kubernetes.io/projected/be4db1ae-f8d8-4739-ac2c-148857809916-kube-api-access-8nc7h\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.650645 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/be4db1ae-f8d8-4739-ac2c-148857809916-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.751687 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nc7h\" (UniqueName: \"kubernetes.io/projected/be4db1ae-f8d8-4739-ac2c-148857809916-kube-api-access-8nc7h\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc 
kubenswrapper[4723]: I1211 15:38:40.751748 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/be4db1ae-f8d8-4739-ac2c-148857809916-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.751810 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.751860 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.751883 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/be4db1ae-f8d8-4739-ac2c-148857809916-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.752754 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/be4db1ae-f8d8-4739-ac2c-148857809916-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: E1211 15:38:40.753312 4723 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 11 15:38:40 crc kubenswrapper[4723]: E1211 15:38:40.753399 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls podName:be4db1ae-f8d8-4739-ac2c-148857809916 nodeName:}" failed. No retries permitted until 2025-12-11 15:38:41.253380937 +0000 UTC m=+932.027614372 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" (UID: "be4db1ae-f8d8-4739-ac2c-148857809916") : secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.753531 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/be4db1ae-f8d8-4739-ac2c-148857809916-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.765833 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:40 crc kubenswrapper[4723]: I1211 15:38:40.775166 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nc7h\" (UniqueName: \"kubernetes.io/projected/be4db1ae-f8d8-4739-ac2c-148857809916-kube-api-access-8nc7h\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:41 crc kubenswrapper[4723]: I1211 15:38:41.057599 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"667fd4db-55d2-4e83-8f5b-73ffd8051429","Type":"ContainerStarted","Data":"2391babd88a5207e14fc6e9ee7b422792370469121979058026e1962a974f013"} Dec 11 15:38:41 crc kubenswrapper[4723]: I1211 15:38:41.259056 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:41 crc kubenswrapper[4723]: E1211 15:38:41.259380 4723 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 11 15:38:41 crc kubenswrapper[4723]: E1211 15:38:41.259449 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls podName:be4db1ae-f8d8-4739-ac2c-148857809916 nodeName:}" failed. No retries permitted until 2025-12-11 15:38:42.259429835 +0000 UTC m=+933.033663310 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" (UID: "be4db1ae-f8d8-4739-ac2c-148857809916") : secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 11 15:38:42 crc kubenswrapper[4723]: I1211 15:38:42.071559 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123","Type":"ContainerStarted","Data":"a35b2d878732f9f72df5750de527d49a69448efb07ce0b014bdcb21123e360af"} Dec 11 15:38:42 crc kubenswrapper[4723]: I1211 15:38:42.285934 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:42 crc kubenswrapper[4723]: E1211 15:38:42.286148 4723 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 11 15:38:42 crc kubenswrapper[4723]: E1211 15:38:42.286203 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls podName:be4db1ae-f8d8-4739-ac2c-148857809916 nodeName:}" failed. No retries permitted until 2025-12-11 15:38:44.286185633 +0000 UTC m=+935.060419068 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" (UID: "be4db1ae-f8d8-4739-ac2c-148857809916") : secret "default-cloud1-ceil-meter-proxy-tls" not found Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.021670 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d"] Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.023158 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.025109 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.025718 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.032167 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d"] Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.214031 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxcj6\" (UniqueName: \"kubernetes.io/projected/5bd859dc-e15a-476e-8a7c-6e9c8effba35-kube-api-access-wxcj6\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.214322 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5bd859dc-e15a-476e-8a7c-6e9c8effba35-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.214384 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5bd859dc-e15a-476e-8a7c-6e9c8effba35-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.214402 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.214455 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.316307 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " 
pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.316382 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5bd859dc-e15a-476e-8a7c-6e9c8effba35-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.316408 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.316441 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.316501 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxcj6\" (UniqueName: \"kubernetes.io/projected/5bd859dc-e15a-476e-8a7c-6e9c8effba35-kube-api-access-wxcj6\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.316555 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5bd859dc-e15a-476e-8a7c-6e9c8effba35-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: E1211 15:38:44.317069 4723 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Dec 11 15:38:44 crc kubenswrapper[4723]: E1211 15:38:44.317139 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls podName:5bd859dc-e15a-476e-8a7c-6e9c8effba35 nodeName:}" failed. No retries permitted until 2025-12-11 15:38:44.817121223 +0000 UTC m=+935.591354658 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" (UID: "5bd859dc-e15a-476e-8a7c-6e9c8effba35") : secret "default-cloud1-sens-meter-proxy-tls" not found Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.317316 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5bd859dc-e15a-476e-8a7c-6e9c8effba35-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.317425 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5bd859dc-e15a-476e-8a7c-6e9c8effba35-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.324513 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/be4db1ae-f8d8-4739-ac2c-148857809916-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4\" (UID: \"be4db1ae-f8d8-4739-ac2c-148857809916\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.339668 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.363898 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxcj6\" (UniqueName: \"kubernetes.io/projected/5bd859dc-e15a-476e-8a7c-6e9c8effba35-kube-api-access-wxcj6\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.561084 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" Dec 11 15:38:44 crc kubenswrapper[4723]: I1211 15:38:44.832931 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:44 crc kubenswrapper[4723]: E1211 15:38:44.833127 4723 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Dec 11 15:38:44 crc kubenswrapper[4723]: E1211 15:38:44.833347 4723 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls podName:5bd859dc-e15a-476e-8a7c-6e9c8effba35 nodeName:}" failed. No retries permitted until 2025-12-11 15:38:45.833328123 +0000 UTC m=+936.607561548 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" (UID: "5bd859dc-e15a-476e-8a7c-6e9c8effba35") : secret "default-cloud1-sens-meter-proxy-tls" not found Dec 11 15:38:45 crc kubenswrapper[4723]: I1211 15:38:45.078275 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4"] Dec 11 15:38:45 crc kubenswrapper[4723]: I1211 15:38:45.845379 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:45 crc kubenswrapper[4723]: I1211 15:38:45.851048 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5bd859dc-e15a-476e-8a7c-6e9c8effba35-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d\" (UID: \"5bd859dc-e15a-476e-8a7c-6e9c8effba35\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:46 crc kubenswrapper[4723]: I1211 15:38:46.095986 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerStarted","Data":"1b4ca02726d80e081d46c287702fffd46c138d8aa2dc5cebb399b9062e2667be"} Dec 11 15:38:46 crc kubenswrapper[4723]: I1211 15:38:46.143802 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" Dec 11 15:38:46 crc kubenswrapper[4723]: I1211 15:38:46.853511 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d"] Dec 11 15:38:54 crc kubenswrapper[4723]: W1211 15:38:54.995115 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bd859dc_e15a_476e_8a7c_6e9c8effba35.slice/crio-ddf468f3bffcdc308d1e5970a0be53834b5bc4580ceea20eeaff8115343801f1 WatchSource:0}: Error finding container ddf468f3bffcdc308d1e5970a0be53834b5bc4580ceea20eeaff8115343801f1: Status 404 returned error can't find the container with id ddf468f3bffcdc308d1e5970a0be53834b5bc4580ceea20eeaff8115343801f1 Dec 11 15:38:55 crc kubenswrapper[4723]: I1211 15:38:55.425933 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerStarted","Data":"ddf468f3bffcdc308d1e5970a0be53834b5bc4580ceea20eeaff8115343801f1"} Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.761310 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk"] Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.763605 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.768887 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.768997 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.771921 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk"] Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.927674 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/47690000-d05f-44e8-a913-37a9e225d35f-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.927775 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gls6t\" (UniqueName: \"kubernetes.io/projected/47690000-d05f-44e8-a913-37a9e225d35f-kube-api-access-gls6t\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.927806 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/47690000-d05f-44e8-a913-37a9e225d35f-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " 
pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:38:59 crc kubenswrapper[4723]: I1211 15:38:59.927854 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/47690000-d05f-44e8-a913-37a9e225d35f-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.029311 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/47690000-d05f-44e8-a913-37a9e225d35f-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.029378 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gls6t\" (UniqueName: \"kubernetes.io/projected/47690000-d05f-44e8-a913-37a9e225d35f-kube-api-access-gls6t\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.029411 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/47690000-d05f-44e8-a913-37a9e225d35f-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.029455 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/47690000-d05f-44e8-a913-37a9e225d35f-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.029983 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/47690000-d05f-44e8-a913-37a9e225d35f-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.030374 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/47690000-d05f-44e8-a913-37a9e225d35f-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.036562 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/47690000-d05f-44e8-a913-37a9e225d35f-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: 
\"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.050220 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gls6t\" (UniqueName: \"kubernetes.io/projected/47690000-d05f-44e8-a913-37a9e225d35f-kube-api-access-gls6t\") pod \"default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk\" (UID: \"47690000-d05f-44e8-a913-37a9e225d35f\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:00 crc kubenswrapper[4723]: I1211 15:39:00.141615 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" Dec 11 15:39:01 crc kubenswrapper[4723]: E1211 15:39:01.927777 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="quay.io/openshift/origin-oauth-proxy:latest" Dec 11 15:39:01 crc kubenswrapper[4723]: E1211 15:39:01.927938 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:oauth-proxy,Image:quay.io/openshift/origin-oauth-proxy:latest,Command:[],Args:[-https-address=:8083 -tls-cert=/etc/tls/private/tls.crt -tls-key=/etc/tls/private/tls.key -cookie-secret-file=/etc/proxy/secrets/session_secret -openshift-service-account=smart-gateway -upstream=http://localhost:8081/ -openshift-delegate-urls={\"/\": {\"namespace\": \"service-telemetry\", \"resource\": \"smartgateways\", \"group\": \"smartgateway.infra.watch\", \"verb\": \"get\"}}],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:default-cloud1-coll-meter-proxy-tls,ReadOnly:false,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:session-secret,ReadOnly:false,MountPath:/etc/proxy/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wvx2x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9_service-telemetry(5ef35e91-5f1f-4463-aded-49de8810267a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.478226 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9"] Dec 11 15:39:02 crc 
kubenswrapper[4723]: I1211 15:39:02.479493 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.481736 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.536411 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9"] Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.668743 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.668804 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.668827 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cl2g\" (UniqueName: \"kubernetes.io/projected/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-kube-api-access-5cl2g\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.668939 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.770173 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.770253 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.770285 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cl2g\" 
(UniqueName: \"kubernetes.io/projected/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-kube-api-access-5cl2g\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.770329 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.770857 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.771929 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.779481 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.790540 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cl2g\" (UniqueName: \"kubernetes.io/projected/07f08f70-3f7a-4fa0-8a3b-8e57366cc516-kube-api-access-5cl2g\") pod \"default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9\" (UID: \"07f08f70-3f7a-4fa0-8a3b-8e57366cc516\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.806282 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" Dec 11 15:39:02 crc kubenswrapper[4723]: I1211 15:39:02.990031 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk"] Dec 11 15:39:02 crc kubenswrapper[4723]: W1211 15:39:02.997732 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47690000_d05f_44e8_a913_37a9e225d35f.slice/crio-e059b044af015be6f149ebb2d4f9377bf155009390b43f145ecd64234f48d7a8 WatchSource:0}: Error finding container e059b044af015be6f149ebb2d4f9377bf155009390b43f145ecd64234f48d7a8: Status 404 returned error can't find the container with id e059b044af015be6f149ebb2d4f9377bf155009390b43f145ecd64234f48d7a8 Dec 11 15:39:03 crc kubenswrapper[4723]: I1211 15:39:03.184712 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9"] Dec 11 15:39:03 crc kubenswrapper[4723]: W1211 15:39:03.188683 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07f08f70_3f7a_4fa0_8a3b_8e57366cc516.slice/crio-ed67cec3c7b78f87eadd6fdc5d1a327c0702ccc2ddca3a401de10ad375982bd2 WatchSource:0}: Error finding container ed67cec3c7b78f87eadd6fdc5d1a327c0702ccc2ddca3a401de10ad375982bd2: Status 404 returned error can't find the container with id ed67cec3c7b78f87eadd6fdc5d1a327c0702ccc2ddca3a401de10ad375982bd2 Dec 11 15:39:03 crc kubenswrapper[4723]: E1211 15:39:03.331710 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="service-telemetry/prometheus-default-0" podUID="917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123" Dec 11 15:39:03 crc kubenswrapper[4723]: I1211 15:39:03.500022 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerStarted","Data":"18d36473982b21d4889586ea4592b9137896519b60e5be929cf40a172fccb9d9"} Dec 11 15:39:03 crc kubenswrapper[4723]: I1211 15:39:03.501833 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123","Type":"ContainerStarted","Data":"a01b1e9b903c4ca51f811d11c784e1e44a0fd2b93ada06940b9ea85d4df8017b"} Dec 11 15:39:03 crc kubenswrapper[4723]: I1211 15:39:03.503938 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" event={"ID":"d86b58d7-d8ba-4d6b-8b06-3013e693f293","Type":"ContainerStarted","Data":"17074933edf811fe82fa4bd68df6d5ff8751f3f411c314066ef422ec0610a8fe"} Dec 11 15:39:03 crc kubenswrapper[4723]: I1211 15:39:03.505146 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" event={"ID":"07f08f70-3f7a-4fa0-8a3b-8e57366cc516","Type":"ContainerStarted","Data":"ed67cec3c7b78f87eadd6fdc5d1a327c0702ccc2ddca3a401de10ad375982bd2"} Dec 11 15:39:03 crc kubenswrapper[4723]: I1211 15:39:03.506473 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" 
event={"ID":"47690000-d05f-44e8-a913-37a9e225d35f","Type":"ContainerStarted","Data":"e059b044af015be6f149ebb2d4f9377bf155009390b43f145ecd64234f48d7a8"} Dec 11 15:39:03 crc kubenswrapper[4723]: I1211 15:39:03.552118 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-pvfdq" podStartSLOduration=2.0129276210000002 podStartE2EDuration="51.552096974s" podCreationTimestamp="2025-12-11 15:38:12 +0000 UTC" firstStartedPulling="2025-12-11 15:38:13.144197918 +0000 UTC m=+903.918431353" lastFinishedPulling="2025-12-11 15:39:02.683367271 +0000 UTC m=+953.457600706" observedRunningTime="2025-12-11 15:39:03.547955273 +0000 UTC m=+954.322188718" watchObservedRunningTime="2025-12-11 15:39:03.552096974 +0000 UTC m=+954.326330409" Dec 11 15:39:04 crc kubenswrapper[4723]: I1211 15:39:04.517458 4723 generic.go:334] "Generic (PLEG): container finished" podID="667fd4db-55d2-4e83-8f5b-73ffd8051429" containerID="2391babd88a5207e14fc6e9ee7b422792370469121979058026e1962a974f013" exitCode=0 Dec 11 15:39:04 crc kubenswrapper[4723]: I1211 15:39:04.517770 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"667fd4db-55d2-4e83-8f5b-73ffd8051429","Type":"ContainerDied","Data":"2391babd88a5207e14fc6e9ee7b422792370469121979058026e1962a974f013"} Dec 11 15:39:04 crc kubenswrapper[4723]: I1211 15:39:04.526126 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerStarted","Data":"9f2ac79126d1c122f2e426068f78bbaf0d59788bd57f00c8d713c5040f78d05d"} Dec 11 15:39:05 crc kubenswrapper[4723]: I1211 15:39:05.537504 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123","Type":"ContainerStarted","Data":"55488dc1279132d5b928c3397c5a141e360194b4eb3845d09a624ccfe8fe7a1c"} Dec 11 15:39:05 crc kubenswrapper[4723]: I1211 15:39:05.562915 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.417271067 podStartE2EDuration="1m6.562901357s" podCreationTimestamp="2025-12-11 15:37:59 +0000 UTC" firstStartedPulling="2025-12-11 15:38:02.302861707 +0000 UTC m=+893.077095142" lastFinishedPulling="2025-12-11 15:39:04.448491997 +0000 UTC m=+955.222725432" observedRunningTime="2025-12-11 15:39:05.561777227 +0000 UTC m=+956.336010662" watchObservedRunningTime="2025-12-11 15:39:05.562901357 +0000 UTC m=+956.337134792" Dec 11 15:39:07 crc kubenswrapper[4723]: I1211 15:39:07.071691 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Dec 11 15:39:12 crc kubenswrapper[4723]: I1211 15:39:12.585561 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"667fd4db-55d2-4e83-8f5b-73ffd8051429","Type":"ContainerStarted","Data":"ee937aa982bed1b519eeb1b59ee7415990d3e0d1a42c72901572a49d1b36a83b"} Dec 11 15:39:13 crc kubenswrapper[4723]: I1211 15:39:13.745076 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:39:13 crc kubenswrapper[4723]: I1211 
15:39:13.745151 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:39:14 crc kubenswrapper[4723]: I1211 15:39:14.601747 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerStarted","Data":"6bc48f11b65bc71f0111a20f964d0954056f981f2c0f0b47c3777cfef2dcccd2"} Dec 11 15:39:14 crc kubenswrapper[4723]: I1211 15:39:14.603625 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" event={"ID":"5ef35e91-5f1f-4463-aded-49de8810267a","Type":"ContainerStarted","Data":"9c4511b6a5d7f4a56ce8ec76616cf58d50e91f662e657887b9951731e37b93b3"} Dec 11 15:39:14 crc kubenswrapper[4723]: I1211 15:39:14.606187 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerStarted","Data":"6a9146801a008a755e773d22ce9347cef96d306f1def049f27258d4c3ec9b745"} Dec 11 15:39:15 crc kubenswrapper[4723]: I1211 15:39:15.621141 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" event={"ID":"47690000-d05f-44e8-a913-37a9e225d35f","Type":"ContainerStarted","Data":"85af019e9baeee535dbc61be23b21f0be40d5ad3490b6f401a7fa418d0faa652"} Dec 11 15:39:15 crc kubenswrapper[4723]: I1211 15:39:15.622715 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" event={"ID":"07f08f70-3f7a-4fa0-8a3b-8e57366cc516","Type":"ContainerStarted","Data":"421c952ea180785e4a0607a88792ec48ab3925054760591dafd1f88478ea695d"} Dec 11 15:39:16 crc kubenswrapper[4723]: I1211 15:39:16.644804 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"667fd4db-55d2-4e83-8f5b-73ffd8051429","Type":"ContainerStarted","Data":"f8042b5822cbe1abb995eb8a0beb32a6f95775c191e69661e7687fca2d027fd4"} Dec 11 15:39:17 crc kubenswrapper[4723]: I1211 15:39:17.072212 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Dec 11 15:39:17 crc kubenswrapper[4723]: I1211 15:39:17.338997 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Dec 11 15:39:17 crc kubenswrapper[4723]: I1211 15:39:17.794642 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6md6t"] Dec 11 15:39:17 crc kubenswrapper[4723]: I1211 15:39:17.794933 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" podUID="6c0558d6-3251-42b5-a202-12e388f9fdf0" containerName="default-interconnect" containerID="cri-o://e9f740c11adb8fc0be265033eeb490f6a7abd542aaee9f9aa6cb957087f66232" gracePeriod=30 Dec 11 15:39:17 crc kubenswrapper[4723]: I1211 15:39:17.845299 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Dec 11 15:39:17 crc 
kubenswrapper[4723]: E1211 15:39:17.939079 4723 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c0558d6_3251_42b5_a202_12e388f9fdf0.slice/crio-e9f740c11adb8fc0be265033eeb490f6a7abd542aaee9f9aa6cb957087f66232.scope\": RecentStats: unable to find data in memory cache]" Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.753298 4723 generic.go:334] "Generic (PLEG): container finished" podID="6c0558d6-3251-42b5-a202-12e388f9fdf0" containerID="e9f740c11adb8fc0be265033eeb490f6a7abd542aaee9f9aa6cb957087f66232" exitCode=0 Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.753460 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" event={"ID":"6c0558d6-3251-42b5-a202-12e388f9fdf0","Type":"ContainerDied","Data":"e9f740c11adb8fc0be265033eeb490f6a7abd542aaee9f9aa6cb957087f66232"} Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.755429 4723 generic.go:334] "Generic (PLEG): container finished" podID="5ef35e91-5f1f-4463-aded-49de8810267a" containerID="9c4511b6a5d7f4a56ce8ec76616cf58d50e91f662e657887b9951731e37b93b3" exitCode=0 Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.755471 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" event={"ID":"5ef35e91-5f1f-4463-aded-49de8810267a","Type":"ContainerDied","Data":"9c4511b6a5d7f4a56ce8ec76616cf58d50e91f662e657887b9951731e37b93b3"} Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.758251 4723 generic.go:334] "Generic (PLEG): container finished" podID="be4db1ae-f8d8-4739-ac2c-148857809916" containerID="6a9146801a008a755e773d22ce9347cef96d306f1def049f27258d4c3ec9b745" exitCode=0 Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.758313 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerDied","Data":"6a9146801a008a755e773d22ce9347cef96d306f1def049f27258d4c3ec9b745"} Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.760696 4723 generic.go:334] "Generic (PLEG): container finished" podID="5bd859dc-e15a-476e-8a7c-6e9c8effba35" containerID="6bc48f11b65bc71f0111a20f964d0954056f981f2c0f0b47c3777cfef2dcccd2" exitCode=0 Dec 11 15:39:18 crc kubenswrapper[4723]: I1211 15:39:18.760788 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerDied","Data":"6bc48f11b65bc71f0111a20f964d0954056f981f2c0f0b47c3777cfef2dcccd2"} Dec 11 15:39:19 crc kubenswrapper[4723]: I1211 15:39:19.770412 4723 generic.go:334] "Generic (PLEG): container finished" podID="07f08f70-3f7a-4fa0-8a3b-8e57366cc516" containerID="421c952ea180785e4a0607a88792ec48ab3925054760591dafd1f88478ea695d" exitCode=0 Dec 11 15:39:19 crc kubenswrapper[4723]: I1211 15:39:19.770507 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" event={"ID":"07f08f70-3f7a-4fa0-8a3b-8e57366cc516","Type":"ContainerDied","Data":"421c952ea180785e4a0607a88792ec48ab3925054760591dafd1f88478ea695d"} Dec 11 15:39:19 crc kubenswrapper[4723]: I1211 15:39:19.773422 4723 generic.go:334] "Generic (PLEG): container finished" 
podID="47690000-d05f-44e8-a913-37a9e225d35f" containerID="85af019e9baeee535dbc61be23b21f0be40d5ad3490b6f401a7fa418d0faa652" exitCode=0 Dec 11 15:39:19 crc kubenswrapper[4723]: I1211 15:39:19.773452 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" event={"ID":"47690000-d05f-44e8-a913-37a9e225d35f","Type":"ContainerDied","Data":"85af019e9baeee535dbc61be23b21f0be40d5ad3490b6f401a7fa418d0faa652"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.422145 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.461311 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mcch7"] Dec 11 15:39:20 crc kubenswrapper[4723]: E1211 15:39:20.461640 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c0558d6-3251-42b5-a202-12e388f9fdf0" containerName="default-interconnect" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.461654 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c0558d6-3251-42b5-a202-12e388f9fdf0" containerName="default-interconnect" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.461799 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c0558d6-3251-42b5-a202-12e388f9fdf0" containerName="default-interconnect" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.462366 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.476451 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mcch7"] Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.484303 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-credentials\") pod \"6c0558d6-3251-42b5-a202-12e388f9fdf0\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.484366 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsgbm\" (UniqueName: \"kubernetes.io/projected/6c0558d6-3251-42b5-a202-12e388f9fdf0-kube-api-access-bsgbm\") pod \"6c0558d6-3251-42b5-a202-12e388f9fdf0\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.484456 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-users\") pod \"6c0558d6-3251-42b5-a202-12e388f9fdf0\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.484485 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-ca\") pod \"6c0558d6-3251-42b5-a202-12e388f9fdf0\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.484565 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-credentials\") pod \"6c0558d6-3251-42b5-a202-12e388f9fdf0\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.484599 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-config\") pod \"6c0558d6-3251-42b5-a202-12e388f9fdf0\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.484621 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-ca\") pod \"6c0558d6-3251-42b5-a202-12e388f9fdf0\" (UID: \"6c0558d6-3251-42b5-a202-12e388f9fdf0\") " Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.492493 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "6c0558d6-3251-42b5-a202-12e388f9fdf0" (UID: "6c0558d6-3251-42b5-a202-12e388f9fdf0"). InnerVolumeSpecName "default-interconnect-openstack-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.492665 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "6c0558d6-3251-42b5-a202-12e388f9fdf0" (UID: "6c0558d6-3251-42b5-a202-12e388f9fdf0"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.492850 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "6c0558d6-3251-42b5-a202-12e388f9fdf0" (UID: "6c0558d6-3251-42b5-a202-12e388f9fdf0"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.493622 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "6c0558d6-3251-42b5-a202-12e388f9fdf0" (UID: "6c0558d6-3251-42b5-a202-12e388f9fdf0"). InnerVolumeSpecName "default-interconnect-openstack-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.498144 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "6c0558d6-3251-42b5-a202-12e388f9fdf0" (UID: "6c0558d6-3251-42b5-a202-12e388f9fdf0"). InnerVolumeSpecName "default-interconnect-inter-router-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.503431 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c0558d6-3251-42b5-a202-12e388f9fdf0-kube-api-access-bsgbm" (OuterVolumeSpecName: "kube-api-access-bsgbm") pod "6c0558d6-3251-42b5-a202-12e388f9fdf0" (UID: "6c0558d6-3251-42b5-a202-12e388f9fdf0"). InnerVolumeSpecName "kube-api-access-bsgbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.512267 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "6c0558d6-3251-42b5-a202-12e388f9fdf0" (UID: "6c0558d6-3251-42b5-a202-12e388f9fdf0"). InnerVolumeSpecName "sasl-users". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.585956 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586292 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586334 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-sasl-users\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586434 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586515 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkcqk\" (UniqueName: \"kubernetes.io/projected/bd487b99-e69c-4809-a266-490e9ea7789f-kube-api-access-kkcqk\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586547 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/bd487b99-e69c-4809-a266-490e9ea7789f-sasl-config\") pod 
\"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586602 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586684 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsgbm\" (UniqueName: \"kubernetes.io/projected/6c0558d6-3251-42b5-a202-12e388f9fdf0-kube-api-access-bsgbm\") on node \"crc\" DevicePath \"\"" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586703 4723 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-users\") on node \"crc\" DevicePath \"\"" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586713 4723 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586739 4723 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586754 4723 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/6c0558d6-3251-42b5-a202-12e388f9fdf0-sasl-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586763 4723 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.586775 4723 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/6c0558d6-3251-42b5-a202-12e388f9fdf0-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.687895 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.687951 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " 
pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.687992 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.688028 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-sasl-users\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.688074 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.688122 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkcqk\" (UniqueName: \"kubernetes.io/projected/bd487b99-e69c-4809-a266-490e9ea7789f-kube-api-access-kkcqk\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.688152 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/bd487b99-e69c-4809-a266-490e9ea7789f-sasl-config\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.690119 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/bd487b99-e69c-4809-a266-490e9ea7789f-sasl-config\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.697184 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.697590 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " 
pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.697863 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.698030 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.699341 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/bd487b99-e69c-4809-a266-490e9ea7789f-sasl-users\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.708000 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkcqk\" (UniqueName: \"kubernetes.io/projected/bd487b99-e69c-4809-a266-490e9ea7789f-kube-api-access-kkcqk\") pod \"default-interconnect-68864d46cb-mcch7\" (UID: \"bd487b99-e69c-4809-a266-490e9ea7789f\") " pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: E1211 15:39:20.735099 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" podUID="5ef35e91-5f1f-4463-aded-49de8810267a" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.782047 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"667fd4db-55d2-4e83-8f5b-73ffd8051429","Type":"ContainerStarted","Data":"827d283edb7f7fe2273fa61d3a125da68d8507ee02b8c90002b9812cca4b2d62"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.782389 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-mcch7" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.794612 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerStarted","Data":"f25eab50c632c67ad7f00df1fcb34d7f33ba04ca025126ae84c090b724101660"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.795105 4723 scope.go:117] "RemoveContainer" containerID="6bc48f11b65bc71f0111a20f964d0954056f981f2c0f0b47c3777cfef2dcccd2" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.798071 4723 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.798494 4723 scope.go:117] "RemoveContainer" containerID="421c952ea180785e4a0607a88792ec48ab3925054760591dafd1f88478ea695d" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.798500 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" event={"ID":"07f08f70-3f7a-4fa0-8a3b-8e57366cc516","Type":"ContainerStarted","Data":"d7a07756301b166bf4193febf4ad2bae890e41bf34434cf089cc3be808baad6f"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.800240 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" event={"ID":"47690000-d05f-44e8-a913-37a9e225d35f","Type":"ContainerStarted","Data":"37ad6ec15a82a74d921d4d0c4d04510e01ceff58008de2f50e049e3ecdb1c445"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.800558 4723 scope.go:117] "RemoveContainer" containerID="85af019e9baeee535dbc61be23b21f0be40d5ad3490b6f401a7fa418d0faa652" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.802182 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" event={"ID":"6c0558d6-3251-42b5-a202-12e388f9fdf0","Type":"ContainerDied","Data":"d969d5effff59d0034ef95580a59cadef3956b4f61ae2f23e74931278c08a426"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.802204 4723 scope.go:117] "RemoveContainer" containerID="e9f740c11adb8fc0be265033eeb490f6a7abd542aaee9f9aa6cb957087f66232" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.802321 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-6md6t" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.816951 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" event={"ID":"5ef35e91-5f1f-4463-aded-49de8810267a","Type":"ContainerStarted","Data":"83cc7c6de82dbeb147bc24e07e2efe71575a75c6d884732c04f74e00b08b3da3"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.817544 4723 scope.go:117] "RemoveContainer" containerID="9c4511b6a5d7f4a56ce8ec76616cf58d50e91f662e657887b9951731e37b93b3" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.827905 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=46.766818967 podStartE2EDuration="1m2.827884372s" podCreationTimestamp="2025-12-11 15:38:18 +0000 UTC" firstStartedPulling="2025-12-11 15:39:04.519931687 +0000 UTC m=+955.294165122" lastFinishedPulling="2025-12-11 15:39:20.580997092 +0000 UTC m=+971.355230527" observedRunningTime="2025-12-11 15:39:20.822661733 +0000 UTC m=+971.596895168" watchObservedRunningTime="2025-12-11 15:39:20.827884372 +0000 UTC m=+971.602117817" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.836807 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerStarted","Data":"753afdaddf0717f6d816967a779840cb9855b21a421e1a68e1a272802bdbdc14"} Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.837637 4723 scope.go:117] "RemoveContainer" containerID="6a9146801a008a755e773d22ce9347cef96d306f1def049f27258d4c3ec9b745" Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.937073 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6md6t"] Dec 11 15:39:20 crc kubenswrapper[4723]: I1211 15:39:20.940780 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6md6t"] Dec 11 15:39:21 crc kubenswrapper[4723]: I1211 15:39:21.411807 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mcch7"] Dec 11 15:39:21 crc kubenswrapper[4723]: W1211 15:39:21.415616 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd487b99_e69c_4809_a266_490e9ea7789f.slice/crio-c87113dbfe07508d6f3943b6a4c76222aecc8e2cd9339414c395bab0e9beebf4 WatchSource:0}: Error finding container c87113dbfe07508d6f3943b6a4c76222aecc8e2cd9339414c395bab0e9beebf4: Status 404 returned error can't find the container with id c87113dbfe07508d6f3943b6a4c76222aecc8e2cd9339414c395bab0e9beebf4 Dec 11 15:39:21 crc kubenswrapper[4723]: I1211 15:39:21.557563 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c0558d6-3251-42b5-a202-12e388f9fdf0" path="/var/lib/kubelet/pods/6c0558d6-3251-42b5-a202-12e388f9fdf0/volumes" Dec 11 15:39:21 crc kubenswrapper[4723]: I1211 15:39:21.844259 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-mcch7" event={"ID":"bd487b99-e69c-4809-a266-490e9ea7789f","Type":"ContainerStarted","Data":"c87113dbfe07508d6f3943b6a4c76222aecc8e2cd9339414c395bab0e9beebf4"} Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.852787 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerStarted","Data":"381d9a25b78de6bbaada3e79b170851669c6508b8d400a23f5aa43d70285ac73"} Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.855000 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerStarted","Data":"ea02e09cbff438150cea0dd5bf95ab559da8e1a7b14b690aac89431bc9e40ab6"} Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.856961 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" event={"ID":"07f08f70-3f7a-4fa0-8a3b-8e57366cc516","Type":"ContainerStarted","Data":"97424b9dc4f6f38ab044d9745651cae99c0fad4526e8b54ab96efdc6ae47408d"} Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.859233 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" event={"ID":"5ef35e91-5f1f-4463-aded-49de8810267a","Type":"ContainerStarted","Data":"8d8b2bcb8ff1f5b6223336bcc2b8c46561dd519047c6f04ea187cf8524852023"} Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.860721 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-mcch7" event={"ID":"bd487b99-e69c-4809-a266-490e9ea7789f","Type":"ContainerStarted","Data":"ec0aaf9a44a5f15068f8dba4bb0f320d7deb9aea7b830496a51d2e35a6ed34d1"} Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.870463 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" podStartSLOduration=5.966242547 podStartE2EDuration="42.870445244s" podCreationTimestamp="2025-12-11 15:38:40 +0000 UTC" firstStartedPulling="2025-12-11 15:38:45.092961733 +0000 UTC m=+935.867195168" lastFinishedPulling="2025-12-11 15:39:21.99716443 +0000 UTC m=+972.771397865" observedRunningTime="2025-12-11 15:39:22.869882319 +0000 UTC m=+973.644115754" watchObservedRunningTime="2025-12-11 15:39:22.870445244 +0000 UTC m=+973.644678679" Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.890111 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-mcch7" podStartSLOduration=5.890096739 podStartE2EDuration="5.890096739s" podCreationTimestamp="2025-12-11 15:39:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:39:22.885774853 +0000 UTC m=+973.660008298" watchObservedRunningTime="2025-12-11 15:39:22.890096739 +0000 UTC m=+973.664330174" Dec 11 15:39:22 crc kubenswrapper[4723]: I1211 15:39:22.915780 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" podStartSLOduration=11.93378796 podStartE2EDuration="38.915761045s" podCreationTimestamp="2025-12-11 15:38:44 +0000 UTC" firstStartedPulling="2025-12-11 15:38:55.003452011 +0000 UTC m=+945.777685446" lastFinishedPulling="2025-12-11 15:39:21.985425096 +0000 UTC m=+972.759658531" observedRunningTime="2025-12-11 15:39:22.912730664 +0000 UTC m=+973.686964099" watchObservedRunningTime="2025-12-11 15:39:22.915761045 +0000 UTC m=+973.689994740" Dec 11 15:39:22 crc kubenswrapper[4723]: 
I1211 15:39:22.933609 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" podStartSLOduration=2.154778074 podStartE2EDuration="20.933593792s" podCreationTimestamp="2025-12-11 15:39:02 +0000 UTC" firstStartedPulling="2025-12-11 15:39:03.191420922 +0000 UTC m=+953.965654367" lastFinishedPulling="2025-12-11 15:39:21.97023665 +0000 UTC m=+972.744470085" observedRunningTime="2025-12-11 15:39:22.929206875 +0000 UTC m=+973.703440310" watchObservedRunningTime="2025-12-11 15:39:22.933593792 +0000 UTC m=+973.707827227" Dec 11 15:39:23 crc kubenswrapper[4723]: I1211 15:39:23.868373 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" event={"ID":"47690000-d05f-44e8-a913-37a9e225d35f","Type":"ContainerStarted","Data":"5179989c3e27f3c9ae9ecf1afd18329aff44f66d282c6c05769d4fed4676f0d0"} Dec 11 15:39:23 crc kubenswrapper[4723]: I1211 15:39:23.885116 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" podStartSLOduration=5.273951711 podStartE2EDuration="24.885096228s" podCreationTimestamp="2025-12-11 15:38:59 +0000 UTC" firstStartedPulling="2025-12-11 15:39:03.001337451 +0000 UTC m=+953.775570886" lastFinishedPulling="2025-12-11 15:39:22.612481968 +0000 UTC m=+973.386715403" observedRunningTime="2025-12-11 15:39:23.881672066 +0000 UTC m=+974.655905501" watchObservedRunningTime="2025-12-11 15:39:23.885096228 +0000 UTC m=+974.659329663" Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.878247 4723 generic.go:334] "Generic (PLEG): container finished" podID="47690000-d05f-44e8-a913-37a9e225d35f" containerID="5179989c3e27f3c9ae9ecf1afd18329aff44f66d282c6c05769d4fed4676f0d0" exitCode=0 Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.878330 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" event={"ID":"47690000-d05f-44e8-a913-37a9e225d35f","Type":"ContainerDied","Data":"5179989c3e27f3c9ae9ecf1afd18329aff44f66d282c6c05769d4fed4676f0d0"} Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.878423 4723 scope.go:117] "RemoveContainer" containerID="85af019e9baeee535dbc61be23b21f0be40d5ad3490b6f401a7fa418d0faa652" Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.878862 4723 scope.go:117] "RemoveContainer" containerID="5179989c3e27f3c9ae9ecf1afd18329aff44f66d282c6c05769d4fed4676f0d0" Dec 11 15:39:24 crc kubenswrapper[4723]: E1211 15:39:24.879176 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk_service-telemetry(47690000-d05f-44e8-a913-37a9e225d35f)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" podUID="47690000-d05f-44e8-a913-37a9e225d35f" Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.881378 4723 generic.go:334] "Generic (PLEG): container finished" podID="be4db1ae-f8d8-4739-ac2c-148857809916" containerID="381d9a25b78de6bbaada3e79b170851669c6508b8d400a23f5aa43d70285ac73" exitCode=0 Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.881446 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" 
event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerDied","Data":"381d9a25b78de6bbaada3e79b170851669c6508b8d400a23f5aa43d70285ac73"} Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.881826 4723 scope.go:117] "RemoveContainer" containerID="381d9a25b78de6bbaada3e79b170851669c6508b8d400a23f5aa43d70285ac73" Dec 11 15:39:24 crc kubenswrapper[4723]: E1211 15:39:24.882004 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4_service-telemetry(be4db1ae-f8d8-4739-ac2c-148857809916)\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" podUID="be4db1ae-f8d8-4739-ac2c-148857809916" Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.884508 4723 generic.go:334] "Generic (PLEG): container finished" podID="5bd859dc-e15a-476e-8a7c-6e9c8effba35" containerID="ea02e09cbff438150cea0dd5bf95ab559da8e1a7b14b690aac89431bc9e40ab6" exitCode=0 Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.884568 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerDied","Data":"ea02e09cbff438150cea0dd5bf95ab559da8e1a7b14b690aac89431bc9e40ab6"} Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.885567 4723 scope.go:117] "RemoveContainer" containerID="ea02e09cbff438150cea0dd5bf95ab559da8e1a7b14b690aac89431bc9e40ab6" Dec 11 15:39:24 crc kubenswrapper[4723]: E1211 15:39:24.885919 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d_service-telemetry(5bd859dc-e15a-476e-8a7c-6e9c8effba35)\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" podUID="5bd859dc-e15a-476e-8a7c-6e9c8effba35" Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.887070 4723 generic.go:334] "Generic (PLEG): container finished" podID="07f08f70-3f7a-4fa0-8a3b-8e57366cc516" containerID="97424b9dc4f6f38ab044d9745651cae99c0fad4526e8b54ab96efdc6ae47408d" exitCode=0 Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.887149 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" event={"ID":"07f08f70-3f7a-4fa0-8a3b-8e57366cc516","Type":"ContainerDied","Data":"97424b9dc4f6f38ab044d9745651cae99c0fad4526e8b54ab96efdc6ae47408d"} Dec 11 15:39:24 crc kubenswrapper[4723]: I1211 15:39:24.888702 4723 scope.go:117] "RemoveContainer" containerID="97424b9dc4f6f38ab044d9745651cae99c0fad4526e8b54ab96efdc6ae47408d" Dec 11 15:39:24 crc kubenswrapper[4723]: E1211 15:39:24.889035 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9_service-telemetry(07f08f70-3f7a-4fa0-8a3b-8e57366cc516)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" podUID="07f08f70-3f7a-4fa0-8a3b-8e57366cc516" Dec 11 15:39:25 crc kubenswrapper[4723]: I1211 15:39:25.134087 4723 scope.go:117] "RemoveContainer" containerID="6a9146801a008a755e773d22ce9347cef96d306f1def049f27258d4c3ec9b745" Dec 11 15:39:25 crc 
kubenswrapper[4723]: I1211 15:39:25.233172 4723 scope.go:117] "RemoveContainer" containerID="6bc48f11b65bc71f0111a20f964d0954056f981f2c0f0b47c3777cfef2dcccd2" Dec 11 15:39:25 crc kubenswrapper[4723]: I1211 15:39:25.278129 4723 scope.go:117] "RemoveContainer" containerID="421c952ea180785e4a0607a88792ec48ab3925054760591dafd1f88478ea695d" Dec 11 15:39:25 crc kubenswrapper[4723]: I1211 15:39:25.896854 4723 scope.go:117] "RemoveContainer" containerID="5179989c3e27f3c9ae9ecf1afd18329aff44f66d282c6c05769d4fed4676f0d0" Dec 11 15:39:25 crc kubenswrapper[4723]: E1211 15:39:25.897364 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk_service-telemetry(47690000-d05f-44e8-a913-37a9e225d35f)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" podUID="47690000-d05f-44e8-a913-37a9e225d35f" Dec 11 15:39:25 crc kubenswrapper[4723]: I1211 15:39:25.901338 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" event={"ID":"5ef35e91-5f1f-4463-aded-49de8810267a","Type":"ContainerStarted","Data":"d8c7569daa8e993cfdd783785a054f1a0b4408d267c23dd69da68c19cff59440"} Dec 11 15:39:25 crc kubenswrapper[4723]: I1211 15:39:25.943877 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9" podStartSLOduration=3.791879378 podStartE2EDuration="49.94330712s" podCreationTimestamp="2025-12-11 15:38:36 +0000 UTC" firstStartedPulling="2025-12-11 15:38:38.946522647 +0000 UTC m=+929.720756082" lastFinishedPulling="2025-12-11 15:39:25.097950389 +0000 UTC m=+975.872183824" observedRunningTime="2025-12-11 15:39:25.943112774 +0000 UTC m=+976.717346219" watchObservedRunningTime="2025-12-11 15:39:25.94330712 +0000 UTC m=+976.717540555" Dec 11 15:39:35 crc kubenswrapper[4723]: I1211 15:39:35.549465 4723 scope.go:117] "RemoveContainer" containerID="381d9a25b78de6bbaada3e79b170851669c6508b8d400a23f5aa43d70285ac73" Dec 11 15:39:36 crc kubenswrapper[4723]: I1211 15:39:36.990097 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4" event={"ID":"be4db1ae-f8d8-4739-ac2c-148857809916","Type":"ContainerStarted","Data":"f1434d1e3046b644d86dfb853bd54a5c0ae272ab37c0d48878e6fef55d71b0fd"} Dec 11 15:39:37 crc kubenswrapper[4723]: I1211 15:39:37.548728 4723 scope.go:117] "RemoveContainer" containerID="ea02e09cbff438150cea0dd5bf95ab559da8e1a7b14b690aac89431bc9e40ab6" Dec 11 15:39:37 crc kubenswrapper[4723]: I1211 15:39:37.548942 4723 scope.go:117] "RemoveContainer" containerID="5179989c3e27f3c9ae9ecf1afd18329aff44f66d282c6c05769d4fed4676f0d0" Dec 11 15:39:39 crc kubenswrapper[4723]: I1211 15:39:39.004285 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d" event={"ID":"5bd859dc-e15a-476e-8a7c-6e9c8effba35","Type":"ContainerStarted","Data":"a995937d4a78c9ccc8bc03f68996d48235939f87048df9bac49e2458ae61eb81"} Dec 11 15:39:39 crc kubenswrapper[4723]: I1211 15:39:39.007074 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk" 
event={"ID":"47690000-d05f-44e8-a913-37a9e225d35f","Type":"ContainerStarted","Data":"9a179d7e4ee5b781f507256529dc3fd23d4c3b2d91979396b81159ec0f7fe63d"} Dec 11 15:39:39 crc kubenswrapper[4723]: I1211 15:39:39.554439 4723 scope.go:117] "RemoveContainer" containerID="97424b9dc4f6f38ab044d9745651cae99c0fad4526e8b54ab96efdc6ae47408d" Dec 11 15:39:41 crc kubenswrapper[4723]: I1211 15:39:41.023270 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9" event={"ID":"07f08f70-3f7a-4fa0-8a3b-8e57366cc516","Type":"ContainerStarted","Data":"2457fe8fca0470b7d20b44c959d8d7fe76efe3e9c5b3e655c05e6105cf1961ff"} Dec 11 15:39:43 crc kubenswrapper[4723]: I1211 15:39:43.745235 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:39:43 crc kubenswrapper[4723]: I1211 15:39:43.745691 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:39:51 crc kubenswrapper[4723]: I1211 15:39:51.943244 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Dec 11 15:39:51 crc kubenswrapper[4723]: I1211 15:39:51.944758 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Dec 11 15:39:51 crc kubenswrapper[4723]: I1211 15:39:51.947339 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Dec 11 15:39:51 crc kubenswrapper[4723]: I1211 15:39:51.947769 4723 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Dec 11 15:39:51 crc kubenswrapper[4723]: I1211 15:39:51.961030 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.138688 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhtt7\" (UniqueName: \"kubernetes.io/projected/b217a81f-1bb9-41fa-89af-3304819123d9-kube-api-access-lhtt7\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.138782 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/b217a81f-1bb9-41fa-89af-3304819123d9-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.138805 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/b217a81f-1bb9-41fa-89af-3304819123d9-qdr-test-config\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.240241 4723 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-lhtt7\" (UniqueName: \"kubernetes.io/projected/b217a81f-1bb9-41fa-89af-3304819123d9-kube-api-access-lhtt7\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.240590 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/b217a81f-1bb9-41fa-89af-3304819123d9-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.240681 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/b217a81f-1bb9-41fa-89af-3304819123d9-qdr-test-config\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.241547 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/b217a81f-1bb9-41fa-89af-3304819123d9-qdr-test-config\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.257349 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/b217a81f-1bb9-41fa-89af-3304819123d9-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.272947 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhtt7\" (UniqueName: \"kubernetes.io/projected/b217a81f-1bb9-41fa-89af-3304819123d9-kube-api-access-lhtt7\") pod \"qdr-test\" (UID: \"b217a81f-1bb9-41fa-89af-3304819123d9\") " pod="service-telemetry/qdr-test" Dec 11 15:39:52 crc kubenswrapper[4723]: I1211 15:39:52.571380 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Dec 11 15:39:53 crc kubenswrapper[4723]: I1211 15:39:53.022521 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Dec 11 15:39:53 crc kubenswrapper[4723]: W1211 15:39:53.027157 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb217a81f_1bb9_41fa_89af_3304819123d9.slice/crio-e34e51a61d4402bad4cf0a23586ac17ba4fe6e5baf323f98f8f5b3cf41eccf02 WatchSource:0}: Error finding container e34e51a61d4402bad4cf0a23586ac17ba4fe6e5baf323f98f8f5b3cf41eccf02: Status 404 returned error can't find the container with id e34e51a61d4402bad4cf0a23586ac17ba4fe6e5baf323f98f8f5b3cf41eccf02 Dec 11 15:39:53 crc kubenswrapper[4723]: I1211 15:39:53.121186 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"b217a81f-1bb9-41fa-89af-3304819123d9","Type":"ContainerStarted","Data":"e34e51a61d4402bad4cf0a23586ac17ba4fe6e5baf323f98f8f5b3cf41eccf02"} Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.183028 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"b217a81f-1bb9-41fa-89af-3304819123d9","Type":"ContainerStarted","Data":"d0d06588d7210c2fc055f81e094ca823ae1eafe2c5f33983266431352a2b9367"} Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.198325 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=2.806634399 podStartE2EDuration="12.198282063s" podCreationTimestamp="2025-12-11 15:39:51 +0000 UTC" firstStartedPulling="2025-12-11 15:39:53.028633213 +0000 UTC m=+1003.802866648" lastFinishedPulling="2025-12-11 15:40:02.420280877 +0000 UTC m=+1013.194514312" observedRunningTime="2025-12-11 15:40:03.196786063 +0000 UTC m=+1013.971019498" watchObservedRunningTime="2025-12-11 15:40:03.198282063 +0000 UTC m=+1013.972515508" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.471826 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-kkg48"] Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.473203 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.474621 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.474984 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.475610 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.479466 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.479855 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.482961 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.486568 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-kkg48"] Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.555333 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-config\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.555389 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88kj7\" (UniqueName: \"kubernetes.io/projected/29e72b8f-5cf3-411c-af28-2d725b9223bc-kube-api-access-88kj7\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.555432 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.555459 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.555482 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-healthcheck-log\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc 
kubenswrapper[4723]: I1211 15:40:03.555506 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-sensubility-config\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.555613 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-publisher\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.656427 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-healthcheck-log\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.656473 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-sensubility-config\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.656510 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-publisher\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.656590 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-config\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.656655 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88kj7\" (UniqueName: \"kubernetes.io/projected/29e72b8f-5cf3-411c-af28-2d725b9223bc-kube-api-access-88kj7\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.656691 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.656712 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-entrypoint-script\") pod 
\"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.657549 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-sensubility-config\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.657876 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-config\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.658731 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-publisher\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.658796 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.661537 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.669397 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-healthcheck-log\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.695197 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88kj7\" (UniqueName: \"kubernetes.io/projected/29e72b8f-5cf3-411c-af28-2d725b9223bc-kube-api-access-88kj7\") pod \"stf-smoketest-smoke1-kkg48\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.793514 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.853126 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.854126 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.859043 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Dec 11 15:40:03 crc kubenswrapper[4723]: I1211 15:40:03.961263 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrrph\" (UniqueName: \"kubernetes.io/projected/1ff04c30-545d-4d74-ae53-2ac1ff88ab71-kube-api-access-vrrph\") pod \"curl\" (UID: \"1ff04c30-545d-4d74-ae53-2ac1ff88ab71\") " pod="service-telemetry/curl" Dec 11 15:40:04 crc kubenswrapper[4723]: I1211 15:40:04.063270 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrrph\" (UniqueName: \"kubernetes.io/projected/1ff04c30-545d-4d74-ae53-2ac1ff88ab71-kube-api-access-vrrph\") pod \"curl\" (UID: \"1ff04c30-545d-4d74-ae53-2ac1ff88ab71\") " pod="service-telemetry/curl" Dec 11 15:40:04 crc kubenswrapper[4723]: I1211 15:40:04.080811 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrrph\" (UniqueName: \"kubernetes.io/projected/1ff04c30-545d-4d74-ae53-2ac1ff88ab71-kube-api-access-vrrph\") pod \"curl\" (UID: \"1ff04c30-545d-4d74-ae53-2ac1ff88ab71\") " pod="service-telemetry/curl" Dec 11 15:40:04 crc kubenswrapper[4723]: I1211 15:40:04.185647 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Dec 11 15:40:04 crc kubenswrapper[4723]: I1211 15:40:04.210603 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-kkg48"] Dec 11 15:40:04 crc kubenswrapper[4723]: W1211 15:40:04.217770 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29e72b8f_5cf3_411c_af28_2d725b9223bc.slice/crio-8d9c7676e9e9bb50fcd0360bca1484646f302702fcaa0c9e03ca3078b794429e WatchSource:0}: Error finding container 8d9c7676e9e9bb50fcd0360bca1484646f302702fcaa0c9e03ca3078b794429e: Status 404 returned error can't find the container with id 8d9c7676e9e9bb50fcd0360bca1484646f302702fcaa0c9e03ca3078b794429e Dec 11 15:40:04 crc kubenswrapper[4723]: I1211 15:40:04.378998 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Dec 11 15:40:04 crc kubenswrapper[4723]: W1211 15:40:04.381700 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ff04c30_545d_4d74_ae53_2ac1ff88ab71.slice/crio-db6c3c606e60a357f00b3ee1d0d57ec320db58570e633da8a718d6376b213ced WatchSource:0}: Error finding container db6c3c606e60a357f00b3ee1d0d57ec320db58570e633da8a718d6376b213ced: Status 404 returned error can't find the container with id db6c3c606e60a357f00b3ee1d0d57ec320db58570e633da8a718d6376b213ced Dec 11 15:40:05 crc kubenswrapper[4723]: I1211 15:40:05.198997 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"1ff04c30-545d-4d74-ae53-2ac1ff88ab71","Type":"ContainerStarted","Data":"db6c3c606e60a357f00b3ee1d0d57ec320db58570e633da8a718d6376b213ced"} Dec 11 15:40:05 crc kubenswrapper[4723]: I1211 15:40:05.200153 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kkg48" event={"ID":"29e72b8f-5cf3-411c-af28-2d725b9223bc","Type":"ContainerStarted","Data":"8d9c7676e9e9bb50fcd0360bca1484646f302702fcaa0c9e03ca3078b794429e"} Dec 11 15:40:13 crc kubenswrapper[4723]: I1211 15:40:13.745102 
4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:40:13 crc kubenswrapper[4723]: I1211 15:40:13.745633 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:40:13 crc kubenswrapper[4723]: I1211 15:40:13.745757 4723 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:40:13 crc kubenswrapper[4723]: I1211 15:40:13.746835 4723 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b706cc1ae9efdd0fd2da504cb2d8b7ce339d544b3cc83f32508f322f998dd8e"} pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 15:40:13 crc kubenswrapper[4723]: I1211 15:40:13.746939 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" containerID="cri-o://8b706cc1ae9efdd0fd2da504cb2d8b7ce339d544b3cc83f32508f322f998dd8e" gracePeriod=600 Dec 11 15:40:14 crc kubenswrapper[4723]: I1211 15:40:14.374100 4723 generic.go:334] "Generic (PLEG): container finished" podID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerID="8b706cc1ae9efdd0fd2da504cb2d8b7ce339d544b3cc83f32508f322f998dd8e" exitCode=0 Dec 11 15:40:14 crc kubenswrapper[4723]: I1211 15:40:14.374376 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerDied","Data":"8b706cc1ae9efdd0fd2da504cb2d8b7ce339d544b3cc83f32508f322f998dd8e"} Dec 11 15:40:14 crc kubenswrapper[4723]: I1211 15:40:14.374509 4723 scope.go:117] "RemoveContainer" containerID="ee76f68a1077eaff7e44d5582a42e9fd3b36da3d7d13741712c5ce62619f8d4e" Dec 11 15:40:17 crc kubenswrapper[4723]: I1211 15:40:17.398124 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kkg48" event={"ID":"29e72b8f-5cf3-411c-af28-2d725b9223bc","Type":"ContainerStarted","Data":"92b0a9e1c164ec8d023e75819d3662504aea8d30c07de50c7fc74ef577a22055"} Dec 11 15:40:17 crc kubenswrapper[4723]: I1211 15:40:17.400271 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"e9435255d18f49a0db14745f8fb7e304867ac082eb3666ed82871694760ba136"} Dec 11 15:40:17 crc kubenswrapper[4723]: I1211 15:40:17.402525 4723 generic.go:334] "Generic (PLEG): container finished" podID="1ff04c30-545d-4d74-ae53-2ac1ff88ab71" containerID="f210a288077cbbcc61dd3f9e9e5a7729e6c133b06f5923f36de1b6f193839023" exitCode=0 Dec 11 15:40:17 crc kubenswrapper[4723]: I1211 15:40:17.402556 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" 
event={"ID":"1ff04c30-545d-4d74-ae53-2ac1ff88ab71","Type":"ContainerDied","Data":"f210a288077cbbcc61dd3f9e9e5a7729e6c133b06f5923f36de1b6f193839023"} Dec 11 15:40:18 crc kubenswrapper[4723]: I1211 15:40:18.698556 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Dec 11 15:40:18 crc kubenswrapper[4723]: I1211 15:40:18.759306 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrrph\" (UniqueName: \"kubernetes.io/projected/1ff04c30-545d-4d74-ae53-2ac1ff88ab71-kube-api-access-vrrph\") pod \"1ff04c30-545d-4d74-ae53-2ac1ff88ab71\" (UID: \"1ff04c30-545d-4d74-ae53-2ac1ff88ab71\") " Dec 11 15:40:18 crc kubenswrapper[4723]: I1211 15:40:18.768246 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ff04c30-545d-4d74-ae53-2ac1ff88ab71-kube-api-access-vrrph" (OuterVolumeSpecName: "kube-api-access-vrrph") pod "1ff04c30-545d-4d74-ae53-2ac1ff88ab71" (UID: "1ff04c30-545d-4d74-ae53-2ac1ff88ab71"). InnerVolumeSpecName "kube-api-access-vrrph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:40:18 crc kubenswrapper[4723]: I1211 15:40:18.852633 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_1ff04c30-545d-4d74-ae53-2ac1ff88ab71/curl/0.log" Dec 11 15:40:18 crc kubenswrapper[4723]: I1211 15:40:18.860464 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrrph\" (UniqueName: \"kubernetes.io/projected/1ff04c30-545d-4d74-ae53-2ac1ff88ab71-kube-api-access-vrrph\") on node \"crc\" DevicePath \"\"" Dec 11 15:40:19 crc kubenswrapper[4723]: I1211 15:40:19.095627 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-pvfdq_d86b58d7-d8ba-4d6b-8b06-3013e693f293/prometheus-webhook-snmp/0.log" Dec 11 15:40:19 crc kubenswrapper[4723]: I1211 15:40:19.417141 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"1ff04c30-545d-4d74-ae53-2ac1ff88ab71","Type":"ContainerDied","Data":"db6c3c606e60a357f00b3ee1d0d57ec320db58570e633da8a718d6376b213ced"} Dec 11 15:40:19 crc kubenswrapper[4723]: I1211 15:40:19.417184 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db6c3c606e60a357f00b3ee1d0d57ec320db58570e633da8a718d6376b213ced" Dec 11 15:40:19 crc kubenswrapper[4723]: I1211 15:40:19.417609 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Dec 11 15:40:36 crc kubenswrapper[4723]: E1211 15:40:36.665661 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/tripleomastercentos9/openstack-ceilometer-notification:current-tripleo" Dec 11 15:40:36 crc kubenswrapper[4723]: E1211 15:40:36.666819 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:smoketest-ceilometer,Image:quay.io/tripleomastercentos9/openstack-ceilometer-notification:current-tripleo,Command:[/smoketest_ceilometer_entrypoint.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CLOUDNAME,Value:smoke1,ValueFrom:nil,},EnvVar{Name:ELASTICSEARCH_AUTH_PASS,Value:tgBNyecLCgtXHGzPq09kvRMz,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_AUTH_TOKEN,Value:eyJhbGciOiJSUzI1NiIsImtpZCI6InF6SnFxNFFjbVk5VmJQZ2dNMmUxdHFmTlJlVWx4UDhSTlhIamV3RUx4WU0ifQ.eyJhdWQiOlsiaHR0cHM6Ly9rdWJlcm5ldGVzLmRlZmF1bHQuc3ZjIl0sImV4cCI6MTc2NTQ3MTE4NywiaWF0IjoxNzY1NDY3NTg3LCJpc3MiOiJodHRwczovL2t1YmVybmV0ZXMuZGVmYXVsdC5zdmMiLCJqdGkiOiI3MTlhZjQ3Ni0wOTZkLTRhZGItYTgyMi02ZTU5NTI1ODlmMDMiLCJrdWJlcm5ldGVzLmlvIjp7Im5hbWVzcGFjZSI6InNlcnZpY2UtdGVsZW1ldHJ5Iiwic2VydmljZWFjY291bnQiOnsibmFtZSI6InN0Zi1wcm9tZXRoZXVzLXJlYWRlciIsInVpZCI6IjQwMzA1OTA4LWNmMzMtNGRhZS1iMmU5LTNjMjM5ZGJjYmE5ZCJ9fSwibmJmIjoxNzY1NDY3NTg3LCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6c2VydmljZS10ZWxlbWV0cnk6c3RmLXByb21ldGhldXMtcmVhZGVyIn0.UjymVglrBEyHWKNbd1TSMtBiXHHuqLAsNeuriDMyT_OhJE8mEaQVoNh_KotC_3sdFgP5EbxGrqJksJwNz3ELCI0KDfLULDF7CZtOp9FrgDxsr3H2f44dPvgolPxtyiTVPC9SuMKLSXvmPc0uXvsC9syYup55or0bFpfGGg5qc6MQOILiaSEK2zL54HvgkDHVim27I1v5X4CQrR_kVGRjTuB5oE0ehZ7ROQEEy0KYAHtXypEENMQANj468q2fPnOryINqIHHJDrqQnttYYtAqHibhnP5o-ZDkGK0kOFWI6D8dESg-R07Lh9jJxtKtC75fMEOj8exSxcLqNcZYOweO0VmWD_LsJkD8F1e1FhVFu-sT9SxuHzqU-zSDy87lSv2tWLf5bbE8DYYMzr9yVqTkEIlrJ61Ns70S9Pm7y-mwWlgu-AcLziamEg5top-k0VNvGKw1WQjnHm1vP78d8_cyLVGnWdKFVdQsyzDrkBkZbLCd_nCyrjF9eBPMQLPDqWGfEu2B4d_GrwH4UQjwhQPu085DUD7DeeRjV8HljoZf1F6ALfKzMYvSkG1DOYccHD2RS3bZlSfnSX140czTpYGgktKU5PznPzuoG5-S4eXltSacFXY9w6XqYxbxkhcS5wVLjtHf8cuMr7xC7GOWzd2sH0qPFe0FhBx11nmeludWUxo,ValueFrom:nil,},EnvVar{Name:OBSERVABILITY_STRATEGY,Value:<>,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ceilometer-publisher,ReadOnly:false,MountPath:/ceilometer_publish.py,SubPath:ceilometer_publish.py,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ceilometer-entrypoint-script,ReadOnly:false,MountPath:/smoketest_ceilometer_entrypoint.sh,SubPath:smoketest_ceilometer_entrypoint.sh,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-88kj7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,}
 start failed in pod stf-smoketest-smoke1-kkg48_service-telemetry(29e72b8f-5cf3-411c-af28-2d725b9223bc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 11 15:40:36 crc kubenswrapper[4723]: E1211 15:40:36.668005 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-ceilometer\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/stf-smoketest-smoke1-kkg48" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" Dec 11 15:40:37 crc kubenswrapper[4723]: E1211 15:40:37.629010 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-ceilometer\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/tripleomastercentos9/openstack-ceilometer-notification:current-tripleo\\\"\"" pod="service-telemetry/stf-smoketest-smoke1-kkg48" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.468776 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-cjs2v"] Dec 11 15:40:48 crc kubenswrapper[4723]: E1211 15:40:48.471536 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ff04c30-545d-4d74-ae53-2ac1ff88ab71" containerName="curl" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.471579 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ff04c30-545d-4d74-ae53-2ac1ff88ab71" containerName="curl" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.473066 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ff04c30-545d-4d74-ae53-2ac1ff88ab71" containerName="curl" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.475074 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.513633 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-cjs2v"] Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.522101 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwv42\" (UniqueName: \"kubernetes.io/projected/aade0fa7-f91f-484c-9391-1b54007d0ae9-kube-api-access-vwv42\") pod \"infrawatch-operators-cjs2v\" (UID: \"aade0fa7-f91f-484c-9391-1b54007d0ae9\") " pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.624721 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwv42\" (UniqueName: \"kubernetes.io/projected/aade0fa7-f91f-484c-9391-1b54007d0ae9-kube-api-access-vwv42\") pod \"infrawatch-operators-cjs2v\" (UID: \"aade0fa7-f91f-484c-9391-1b54007d0ae9\") " pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.661881 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwv42\" (UniqueName: \"kubernetes.io/projected/aade0fa7-f91f-484c-9391-1b54007d0ae9-kube-api-access-vwv42\") pod \"infrawatch-operators-cjs2v\" (UID: \"aade0fa7-f91f-484c-9391-1b54007d0ae9\") " pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:48 crc kubenswrapper[4723]: I1211 15:40:48.824509 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:49 crc kubenswrapper[4723]: I1211 15:40:49.232938 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-cjs2v"] Dec 11 15:40:49 crc kubenswrapper[4723]: I1211 15:40:49.244081 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-pvfdq_d86b58d7-d8ba-4d6b-8b06-3013e693f293/prometheus-webhook-snmp/0.log" Dec 11 15:40:49 crc kubenswrapper[4723]: I1211 15:40:49.717616 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-cjs2v" event={"ID":"aade0fa7-f91f-484c-9391-1b54007d0ae9","Type":"ContainerStarted","Data":"c3d4258fbb53d6fb8b68455c11b640214491730bb8e4ae194542848ca0b7000c"} Dec 11 15:40:50 crc kubenswrapper[4723]: I1211 15:40:50.726419 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-cjs2v" event={"ID":"aade0fa7-f91f-484c-9391-1b54007d0ae9","Type":"ContainerStarted","Data":"86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8"} Dec 11 15:40:50 crc kubenswrapper[4723]: I1211 15:40:50.740686 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-cjs2v" podStartSLOduration=1.50109662 podStartE2EDuration="2.740665264s" podCreationTimestamp="2025-12-11 15:40:48 +0000 UTC" firstStartedPulling="2025-12-11 15:40:49.251643686 +0000 UTC m=+1060.025877131" lastFinishedPulling="2025-12-11 15:40:50.49121234 +0000 UTC m=+1061.265445775" observedRunningTime="2025-12-11 15:40:50.739275766 +0000 UTC m=+1061.513509211" watchObservedRunningTime="2025-12-11 15:40:50.740665264 +0000 UTC m=+1061.514898699" Dec 11 15:40:51 crc kubenswrapper[4723]: I1211 15:40:51.742505 4723 generic.go:334] "Generic (PLEG): container finished" podID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerID="92b0a9e1c164ec8d023e75819d3662504aea8d30c07de50c7fc74ef577a22055" exitCode=1 Dec 11 15:40:51 crc kubenswrapper[4723]: I1211 15:40:51.744868 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kkg48" event={"ID":"29e72b8f-5cf3-411c-af28-2d725b9223bc","Type":"ContainerDied","Data":"92b0a9e1c164ec8d023e75819d3662504aea8d30c07de50c7fc74ef577a22055"} Dec 11 15:40:51 crc kubenswrapper[4723]: I1211 15:40:51.744942 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kkg48" event={"ID":"29e72b8f-5cf3-411c-af28-2d725b9223bc","Type":"ContainerStarted","Data":"d972e00504470beb44b1a83ba5ce3a89cda9e765b1fa593aa266b9864ca0b08a"} Dec 11 15:40:51 crc kubenswrapper[4723]: I1211 15:40:51.745563 4723 scope.go:117] "RemoveContainer" containerID="92b0a9e1c164ec8d023e75819d3662504aea8d30c07de50c7fc74ef577a22055" Dec 11 15:40:58 crc kubenswrapper[4723]: I1211 15:40:58.825483 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:58 crc kubenswrapper[4723]: I1211 15:40:58.826333 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:58 crc kubenswrapper[4723]: I1211 15:40:58.851192 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:40:59 crc kubenswrapper[4723]: I1211 15:40:59.826014 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.261689 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-cjs2v"] Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.262245 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-cjs2v" podUID="aade0fa7-f91f-484c-9391-1b54007d0ae9" containerName="registry-server" containerID="cri-o://86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8" gracePeriod=2 Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.708392 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.732832 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwv42\" (UniqueName: \"kubernetes.io/projected/aade0fa7-f91f-484c-9391-1b54007d0ae9-kube-api-access-vwv42\") pod \"aade0fa7-f91f-484c-9391-1b54007d0ae9\" (UID: \"aade0fa7-f91f-484c-9391-1b54007d0ae9\") " Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.740108 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aade0fa7-f91f-484c-9391-1b54007d0ae9-kube-api-access-vwv42" (OuterVolumeSpecName: "kube-api-access-vwv42") pod "aade0fa7-f91f-484c-9391-1b54007d0ae9" (UID: "aade0fa7-f91f-484c-9391-1b54007d0ae9"). InnerVolumeSpecName "kube-api-access-vwv42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.818104 4723 generic.go:334] "Generic (PLEG): container finished" podID="aade0fa7-f91f-484c-9391-1b54007d0ae9" containerID="86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8" exitCode=0 Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.818143 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-cjs2v" event={"ID":"aade0fa7-f91f-484c-9391-1b54007d0ae9","Type":"ContainerDied","Data":"86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8"} Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.818172 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-cjs2v" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.818189 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-cjs2v" event={"ID":"aade0fa7-f91f-484c-9391-1b54007d0ae9","Type":"ContainerDied","Data":"c3d4258fbb53d6fb8b68455c11b640214491730bb8e4ae194542848ca0b7000c"} Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.818208 4723 scope.go:117] "RemoveContainer" containerID="86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.835150 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwv42\" (UniqueName: \"kubernetes.io/projected/aade0fa7-f91f-484c-9391-1b54007d0ae9-kube-api-access-vwv42\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.837820 4723 scope.go:117] "RemoveContainer" containerID="86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8" Dec 11 15:41:02 crc kubenswrapper[4723]: E1211 15:41:02.838373 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8\": container with ID starting with 86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8 not found: ID does not exist" containerID="86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.838404 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8"} err="failed to get container status \"86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8\": rpc error: code = NotFound desc = could not find container \"86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8\": container with ID starting with 86a6b4c34f0b74cda3de495c28ad4b70927888984743f036def009e14cd264f8 not found: ID does not exist" Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.848290 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-cjs2v"] Dec 11 15:41:02 crc kubenswrapper[4723]: I1211 15:41:02.853155 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-cjs2v"] Dec 11 15:41:03 crc kubenswrapper[4723]: I1211 15:41:03.556932 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aade0fa7-f91f-484c-9391-1b54007d0ae9" path="/var/lib/kubelet/pods/aade0fa7-f91f-484c-9391-1b54007d0ae9/volumes" Dec 11 15:41:22 crc kubenswrapper[4723]: I1211 15:41:22.961370 4723 generic.go:334] "Generic (PLEG): container finished" podID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerID="d972e00504470beb44b1a83ba5ce3a89cda9e765b1fa593aa266b9864ca0b08a" exitCode=1 Dec 11 15:41:22 crc kubenswrapper[4723]: I1211 15:41:22.961600 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kkg48" event={"ID":"29e72b8f-5cf3-411c-af28-2d725b9223bc","Type":"ContainerDied","Data":"d972e00504470beb44b1a83ba5ce3a89cda9e765b1fa593aa266b9864ca0b08a"} Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.265785 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.335036 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-healthcheck-log\") pod \"29e72b8f-5cf3-411c-af28-2d725b9223bc\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.335272 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-entrypoint-script\") pod \"29e72b8f-5cf3-411c-af28-2d725b9223bc\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.335332 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-entrypoint-script\") pod \"29e72b8f-5cf3-411c-af28-2d725b9223bc\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.335364 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-sensubility-config\") pod \"29e72b8f-5cf3-411c-af28-2d725b9223bc\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.335384 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-config\") pod \"29e72b8f-5cf3-411c-af28-2d725b9223bc\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.335420 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88kj7\" (UniqueName: \"kubernetes.io/projected/29e72b8f-5cf3-411c-af28-2d725b9223bc-kube-api-access-88kj7\") pod \"29e72b8f-5cf3-411c-af28-2d725b9223bc\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.335456 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-publisher\") pod \"29e72b8f-5cf3-411c-af28-2d725b9223bc\" (UID: \"29e72b8f-5cf3-411c-af28-2d725b9223bc\") " Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.356221 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29e72b8f-5cf3-411c-af28-2d725b9223bc-kube-api-access-88kj7" (OuterVolumeSpecName: "kube-api-access-88kj7") pod "29e72b8f-5cf3-411c-af28-2d725b9223bc" (UID: "29e72b8f-5cf3-411c-af28-2d725b9223bc"). InnerVolumeSpecName "kube-api-access-88kj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.365823 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "29e72b8f-5cf3-411c-af28-2d725b9223bc" (UID: "29e72b8f-5cf3-411c-af28-2d725b9223bc"). InnerVolumeSpecName "collectd-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.368251 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "29e72b8f-5cf3-411c-af28-2d725b9223bc" (UID: "29e72b8f-5cf3-411c-af28-2d725b9223bc"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.369902 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "29e72b8f-5cf3-411c-af28-2d725b9223bc" (UID: "29e72b8f-5cf3-411c-af28-2d725b9223bc"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.373177 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "29e72b8f-5cf3-411c-af28-2d725b9223bc" (UID: "29e72b8f-5cf3-411c-af28-2d725b9223bc"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.375469 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "29e72b8f-5cf3-411c-af28-2d725b9223bc" (UID: "29e72b8f-5cf3-411c-af28-2d725b9223bc"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.385009 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "29e72b8f-5cf3-411c-af28-2d725b9223bc" (UID: "29e72b8f-5cf3-411c-af28-2d725b9223bc"). InnerVolumeSpecName "collectd-entrypoint-script". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.437887 4723 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.437928 4723 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.437940 4723 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-sensubility-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.437950 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88kj7\" (UniqueName: \"kubernetes.io/projected/29e72b8f-5cf3-411c-af28-2d725b9223bc-kube-api-access-88kj7\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.437960 4723 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.437989 4723 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-healthcheck-log\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.438001 4723 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/29e72b8f-5cf3-411c-af28-2d725b9223bc-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.974808 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kkg48" event={"ID":"29e72b8f-5cf3-411c-af28-2d725b9223bc","Type":"ContainerDied","Data":"8d9c7676e9e9bb50fcd0360bca1484646f302702fcaa0c9e03ca3078b794429e"} Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.975117 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d9c7676e9e9bb50fcd0360bca1484646f302702fcaa0c9e03ca3078b794429e" Dec 11 15:41:24 crc kubenswrapper[4723]: I1211 15:41:24.974861 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kkg48" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.025109 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-kjpll"] Dec 11 15:41:32 crc kubenswrapper[4723]: E1211 15:41:32.025949 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerName="smoketest-ceilometer" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.025982 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerName="smoketest-ceilometer" Dec 11 15:41:32 crc kubenswrapper[4723]: E1211 15:41:32.025996 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerName="smoketest-collectd" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.026003 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerName="smoketest-collectd" Dec 11 15:41:32 crc kubenswrapper[4723]: E1211 15:41:32.026012 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aade0fa7-f91f-484c-9391-1b54007d0ae9" containerName="registry-server" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.026020 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="aade0fa7-f91f-484c-9391-1b54007d0ae9" containerName="registry-server" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.026170 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerName="smoketest-collectd" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.026189 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="aade0fa7-f91f-484c-9391-1b54007d0ae9" containerName="registry-server" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.026205 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="29e72b8f-5cf3-411c-af28-2d725b9223bc" containerName="smoketest-ceilometer" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.026920 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.029198 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.029486 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.029794 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.030010 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.030220 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.030400 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.045575 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-kjpll"] Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.165277 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-healthcheck-log\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.165327 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.165366 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-publisher\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.165395 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.165441 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcmh2\" (UniqueName: \"kubernetes.io/projected/40451783-1a63-4f40-b4b3-4a0307c4ce43-kube-api-access-dcmh2\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc 
kubenswrapper[4723]: I1211 15:41:32.165469 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-sensubility-config\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.165528 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-config\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.267232 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.267301 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcmh2\" (UniqueName: \"kubernetes.io/projected/40451783-1a63-4f40-b4b3-4a0307c4ce43-kube-api-access-dcmh2\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.267342 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-sensubility-config\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.267380 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-config\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.267404 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-healthcheck-log\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.267439 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.267477 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-publisher\") pod \"stf-smoketest-smoke1-kjpll\" 
(UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.268267 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.268418 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-healthcheck-log\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.268654 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-publisher\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.269074 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-config\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.269374 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.269521 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-sensubility-config\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.285996 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcmh2\" (UniqueName: \"kubernetes.io/projected/40451783-1a63-4f40-b4b3-4a0307c4ce43-kube-api-access-dcmh2\") pod \"stf-smoketest-smoke1-kjpll\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.348737 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:41:32 crc kubenswrapper[4723]: I1211 15:41:32.845656 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-kjpll"] Dec 11 15:41:33 crc kubenswrapper[4723]: I1211 15:41:33.028984 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kjpll" event={"ID":"40451783-1a63-4f40-b4b3-4a0307c4ce43","Type":"ContainerStarted","Data":"602a05efd210218540354380f0860632716c70571e248920922d3ce49c2181a0"} Dec 11 15:41:33 crc kubenswrapper[4723]: I1211 15:41:33.029311 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kjpll" event={"ID":"40451783-1a63-4f40-b4b3-4a0307c4ce43","Type":"ContainerStarted","Data":"de06d06a96109b734804bb8050561bc49d052613fc553af7c3bc88a837dcf9ad"} Dec 11 15:41:34 crc kubenswrapper[4723]: I1211 15:41:34.036679 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kjpll" event={"ID":"40451783-1a63-4f40-b4b3-4a0307c4ce43","Type":"ContainerStarted","Data":"e12a8e59c71988d9bf02815ee2584c586220f9363643c101d2469f491b9995da"} Dec 11 15:42:05 crc kubenswrapper[4723]: I1211 15:42:05.464017 4723 generic.go:334] "Generic (PLEG): container finished" podID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerID="e12a8e59c71988d9bf02815ee2584c586220f9363643c101d2469f491b9995da" exitCode=1 Dec 11 15:42:05 crc kubenswrapper[4723]: I1211 15:42:05.464075 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kjpll" event={"ID":"40451783-1a63-4f40-b4b3-4a0307c4ce43","Type":"ContainerDied","Data":"e12a8e59c71988d9bf02815ee2584c586220f9363643c101d2469f491b9995da"} Dec 11 15:42:05 crc kubenswrapper[4723]: I1211 15:42:05.465911 4723 scope.go:117] "RemoveContainer" containerID="e12a8e59c71988d9bf02815ee2584c586220f9363643c101d2469f491b9995da" Dec 11 15:42:06 crc kubenswrapper[4723]: I1211 15:42:06.472022 4723 generic.go:334] "Generic (PLEG): container finished" podID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerID="602a05efd210218540354380f0860632716c70571e248920922d3ce49c2181a0" exitCode=1 Dec 11 15:42:06 crc kubenswrapper[4723]: I1211 15:42:06.472071 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kjpll" event={"ID":"40451783-1a63-4f40-b4b3-4a0307c4ce43","Type":"ContainerDied","Data":"602a05efd210218540354380f0860632716c70571e248920922d3ce49c2181a0"} Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.729250 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.891702 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-healthcheck-log\") pod \"40451783-1a63-4f40-b4b3-4a0307c4ce43\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.891771 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-sensubility-config\") pod \"40451783-1a63-4f40-b4b3-4a0307c4ce43\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.891846 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-config\") pod \"40451783-1a63-4f40-b4b3-4a0307c4ce43\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.891951 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcmh2\" (UniqueName: \"kubernetes.io/projected/40451783-1a63-4f40-b4b3-4a0307c4ce43-kube-api-access-dcmh2\") pod \"40451783-1a63-4f40-b4b3-4a0307c4ce43\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.894273 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-publisher\") pod \"40451783-1a63-4f40-b4b3-4a0307c4ce43\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.894534 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-entrypoint-script\") pod \"40451783-1a63-4f40-b4b3-4a0307c4ce43\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.894569 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-entrypoint-script\") pod \"40451783-1a63-4f40-b4b3-4a0307c4ce43\" (UID: \"40451783-1a63-4f40-b4b3-4a0307c4ce43\") " Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.901505 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40451783-1a63-4f40-b4b3-4a0307c4ce43-kube-api-access-dcmh2" (OuterVolumeSpecName: "kube-api-access-dcmh2") pod "40451783-1a63-4f40-b4b3-4a0307c4ce43" (UID: "40451783-1a63-4f40-b4b3-4a0307c4ce43"). InnerVolumeSpecName "kube-api-access-dcmh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.913209 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "40451783-1a63-4f40-b4b3-4a0307c4ce43" (UID: "40451783-1a63-4f40-b4b3-4a0307c4ce43"). InnerVolumeSpecName "ceilometer-publisher". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.913773 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "40451783-1a63-4f40-b4b3-4a0307c4ce43" (UID: "40451783-1a63-4f40-b4b3-4a0307c4ce43"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.915378 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "40451783-1a63-4f40-b4b3-4a0307c4ce43" (UID: "40451783-1a63-4f40-b4b3-4a0307c4ce43"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.916761 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "40451783-1a63-4f40-b4b3-4a0307c4ce43" (UID: "40451783-1a63-4f40-b4b3-4a0307c4ce43"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.923476 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "40451783-1a63-4f40-b4b3-4a0307c4ce43" (UID: "40451783-1a63-4f40-b4b3-4a0307c4ce43"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.926489 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "40451783-1a63-4f40-b4b3-4a0307c4ce43" (UID: "40451783-1a63-4f40-b4b3-4a0307c4ce43"). InnerVolumeSpecName "sensubility-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.995432 4723 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-healthcheck-log\") on node \"crc\" DevicePath \"\"" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.995716 4723 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-sensubility-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.995783 4723 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.995860 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcmh2\" (UniqueName: \"kubernetes.io/projected/40451783-1a63-4f40-b4b3-4a0307c4ce43-kube-api-access-dcmh2\") on node \"crc\" DevicePath \"\"" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.995928 4723 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.996025 4723 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 11 15:42:07 crc kubenswrapper[4723]: I1211 15:42:07.996125 4723 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/40451783-1a63-4f40-b4b3-4a0307c4ce43-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 11 15:42:08 crc kubenswrapper[4723]: I1211 15:42:08.486093 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-kjpll" event={"ID":"40451783-1a63-4f40-b4b3-4a0307c4ce43","Type":"ContainerDied","Data":"de06d06a96109b734804bb8050561bc49d052613fc553af7c3bc88a837dcf9ad"} Dec 11 15:42:08 crc kubenswrapper[4723]: I1211 15:42:08.486481 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de06d06a96109b734804bb8050561bc49d052613fc553af7c3bc88a837dcf9ad" Dec 11 15:42:08 crc kubenswrapper[4723]: I1211 15:42:08.486138 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-kjpll" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.023643 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-dkhcc"] Dec 11 15:42:26 crc kubenswrapper[4723]: E1211 15:42:26.024521 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerName="smoketest-collectd" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.024538 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerName="smoketest-collectd" Dec 11 15:42:26 crc kubenswrapper[4723]: E1211 15:42:26.024558 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerName="smoketest-ceilometer" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.024565 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerName="smoketest-ceilometer" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.024711 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerName="smoketest-collectd" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.024736 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="40451783-1a63-4f40-b4b3-4a0307c4ce43" containerName="smoketest-ceilometer" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.025515 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.027804 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.029170 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.030034 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.030047 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.030361 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.030612 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.049008 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-dkhcc"] Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.049897 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-healthcheck-log\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.049960 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-publisher\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.050134 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-sensubility-config\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.050203 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.050363 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8hr9\" (UniqueName: \"kubernetes.io/projected/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-kube-api-access-p8hr9\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.050403 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.050432 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-config\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.151211 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8hr9\" (UniqueName: \"kubernetes.io/projected/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-kube-api-access-p8hr9\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.151264 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.151292 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-config\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: 
\"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.151329 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-healthcheck-log\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.151357 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-publisher\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.151387 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-sensubility-config\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.151406 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.152445 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-healthcheck-log\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.152524 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.152713 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-sensubility-config\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.152720 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-publisher\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.153316 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: 
\"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.153416 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-config\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.173390 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8hr9\" (UniqueName: \"kubernetes.io/projected/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-kube-api-access-p8hr9\") pod \"stf-smoketest-smoke1-dkhcc\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.342012 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:42:26 crc kubenswrapper[4723]: I1211 15:42:26.794768 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-dkhcc"] Dec 11 15:42:27 crc kubenswrapper[4723]: I1211 15:42:27.612600 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" event={"ID":"ed6d5b79-ff0c-4df8-9887-d1f691e9385a","Type":"ContainerStarted","Data":"86073f593a23a9d8be0c18883ec8da18b3097b4a653dd3a0cd24598f0ebfc7b3"} Dec 11 15:42:27 crc kubenswrapper[4723]: I1211 15:42:27.612994 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" event={"ID":"ed6d5b79-ff0c-4df8-9887-d1f691e9385a","Type":"ContainerStarted","Data":"613a3492136cb3e5d570b1b6cd6137cf1ec357a478f0fc7e8d0a75e6f218bf57"} Dec 11 15:42:27 crc kubenswrapper[4723]: I1211 15:42:27.613005 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" event={"ID":"ed6d5b79-ff0c-4df8-9887-d1f691e9385a","Type":"ContainerStarted","Data":"e05839e63099b3939919a17bc0cfd6f22ee190bcff27f2c1b6e6f725ed889f7a"} Dec 11 15:42:27 crc kubenswrapper[4723]: I1211 15:42:27.629633 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" podStartSLOduration=1.629533637 podStartE2EDuration="1.629533637s" podCreationTimestamp="2025-12-11 15:42:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 15:42:27.627115053 +0000 UTC m=+1158.401348488" watchObservedRunningTime="2025-12-11 15:42:27.629533637 +0000 UTC m=+1158.403767072" Dec 11 15:42:43 crc kubenswrapper[4723]: I1211 15:42:43.744784 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:42:43 crc kubenswrapper[4723]: I1211 15:42:43.745399 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:43:00 crc kubenswrapper[4723]: I1211 15:43:00.844730 4723 generic.go:334] "Generic (PLEG): container finished" podID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerID="86073f593a23a9d8be0c18883ec8da18b3097b4a653dd3a0cd24598f0ebfc7b3" exitCode=0 Dec 11 15:43:00 crc kubenswrapper[4723]: I1211 15:43:00.845250 4723 generic.go:334] "Generic (PLEG): container finished" podID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerID="613a3492136cb3e5d570b1b6cd6137cf1ec357a478f0fc7e8d0a75e6f218bf57" exitCode=0 Dec 11 15:43:00 crc kubenswrapper[4723]: I1211 15:43:00.844811 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" event={"ID":"ed6d5b79-ff0c-4df8-9887-d1f691e9385a","Type":"ContainerDied","Data":"86073f593a23a9d8be0c18883ec8da18b3097b4a653dd3a0cd24598f0ebfc7b3"} Dec 11 15:43:00 crc kubenswrapper[4723]: I1211 15:43:00.845294 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" event={"ID":"ed6d5b79-ff0c-4df8-9887-d1f691e9385a","Type":"ContainerDied","Data":"613a3492136cb3e5d570b1b6cd6137cf1ec357a478f0fc7e8d0a75e6f218bf57"} Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.113135 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.177321 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-healthcheck-log\") pod \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.177391 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-config\") pod \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.177457 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-entrypoint-script\") pod \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.177490 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-sensubility-config\") pod \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.177553 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-publisher\") pod \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.177570 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: 
\"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-entrypoint-script\") pod \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.177642 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8hr9\" (UniqueName: \"kubernetes.io/projected/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-kube-api-access-p8hr9\") pod \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\" (UID: \"ed6d5b79-ff0c-4df8-9887-d1f691e9385a\") " Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.195887 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "ed6d5b79-ff0c-4df8-9887-d1f691e9385a" (UID: "ed6d5b79-ff0c-4df8-9887-d1f691e9385a"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.195945 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "ed6d5b79-ff0c-4df8-9887-d1f691e9385a" (UID: "ed6d5b79-ff0c-4df8-9887-d1f691e9385a"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.196636 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "ed6d5b79-ff0c-4df8-9887-d1f691e9385a" (UID: "ed6d5b79-ff0c-4df8-9887-d1f691e9385a"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.196664 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "ed6d5b79-ff0c-4df8-9887-d1f691e9385a" (UID: "ed6d5b79-ff0c-4df8-9887-d1f691e9385a"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.197262 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-kube-api-access-p8hr9" (OuterVolumeSpecName: "kube-api-access-p8hr9") pod "ed6d5b79-ff0c-4df8-9887-d1f691e9385a" (UID: "ed6d5b79-ff0c-4df8-9887-d1f691e9385a"). InnerVolumeSpecName "kube-api-access-p8hr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.199784 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "ed6d5b79-ff0c-4df8-9887-d1f691e9385a" (UID: "ed6d5b79-ff0c-4df8-9887-d1f691e9385a"). InnerVolumeSpecName "healthcheck-log". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.206433 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "ed6d5b79-ff0c-4df8-9887-d1f691e9385a" (UID: "ed6d5b79-ff0c-4df8-9887-d1f691e9385a"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.279522 4723 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-healthcheck-log\") on node \"crc\" DevicePath \"\"" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.279589 4723 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.279598 4723 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.279610 4723 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-sensubility-config\") on node \"crc\" DevicePath \"\"" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.279618 4723 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.279627 4723 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.279635 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8hr9\" (UniqueName: \"kubernetes.io/projected/ed6d5b79-ff0c-4df8-9887-d1f691e9385a-kube-api-access-p8hr9\") on node \"crc\" DevicePath \"\"" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.866161 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" event={"ID":"ed6d5b79-ff0c-4df8-9887-d1f691e9385a","Type":"ContainerDied","Data":"e05839e63099b3939919a17bc0cfd6f22ee190bcff27f2c1b6e6f725ed889f7a"} Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.866199 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e05839e63099b3939919a17bc0cfd6f22ee190bcff27f2c1b6e6f725ed889f7a" Dec 11 15:43:02 crc kubenswrapper[4723]: I1211 15:43:02.866261 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dkhcc" Dec 11 15:43:03 crc kubenswrapper[4723]: I1211 15:43:03.886314 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-dkhcc_ed6d5b79-ff0c-4df8-9887-d1f691e9385a/smoketest-collectd/0.log" Dec 11 15:43:04 crc kubenswrapper[4723]: I1211 15:43:04.164452 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-dkhcc_ed6d5b79-ff0c-4df8-9887-d1f691e9385a/smoketest-ceilometer/0.log" Dec 11 15:43:04 crc kubenswrapper[4723]: I1211 15:43:04.442060 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-mcch7_bd487b99-e69c-4809-a266-490e9ea7789f/default-interconnect/0.log" Dec 11 15:43:04 crc kubenswrapper[4723]: I1211 15:43:04.713069 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9_5ef35e91-5f1f-4463-aded-49de8810267a/bridge/1.log" Dec 11 15:43:04 crc kubenswrapper[4723]: I1211 15:43:04.981327 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-ts4p9_5ef35e91-5f1f-4463-aded-49de8810267a/sg-core/0.log" Dec 11 15:43:05 crc kubenswrapper[4723]: I1211 15:43:05.298015 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk_47690000-d05f-44e8-a913-37a9e225d35f/bridge/2.log" Dec 11 15:43:05 crc kubenswrapper[4723]: I1211 15:43:05.645117 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-74d7fd7df9-96vsk_47690000-d05f-44e8-a913-37a9e225d35f/sg-core/0.log" Dec 11 15:43:05 crc kubenswrapper[4723]: I1211 15:43:05.907371 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4_be4db1ae-f8d8-4739-ac2c-148857809916/bridge/2.log" Dec 11 15:43:06 crc kubenswrapper[4723]: I1211 15:43:06.156196 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-gdzl4_be4db1ae-f8d8-4739-ac2c-148857809916/sg-core/0.log" Dec 11 15:43:06 crc kubenswrapper[4723]: I1211 15:43:06.402435 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9_07f08f70-3f7a-4fa0-8a3b-8e57366cc516/bridge/2.log" Dec 11 15:43:06 crc kubenswrapper[4723]: I1211 15:43:06.665085 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-857fcf8c4f-qzzc9_07f08f70-3f7a-4fa0-8a3b-8e57366cc516/sg-core/0.log" Dec 11 15:43:06 crc kubenswrapper[4723]: I1211 15:43:06.944222 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d_5bd859dc-e15a-476e-8a7c-6e9c8effba35/bridge/2.log" Dec 11 15:43:07 crc kubenswrapper[4723]: I1211 15:43:07.252877 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-w4t8d_5bd859dc-e15a-476e-8a7c-6e9c8effba35/sg-core/0.log" Dec 11 15:43:10 crc kubenswrapper[4723]: I1211 15:43:10.080366 4723 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_smart-gateway-operator-76d45b648-v762h_567782db-f350-4118-aa1b-c7008a51a4c6/operator/0.log" Dec 11 15:43:10 crc kubenswrapper[4723]: I1211 15:43:10.361757 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_917c8eb2-66c9-4b10-b8f5-1c3b6e3c7123/prometheus/0.log" Dec 11 15:43:10 crc kubenswrapper[4723]: I1211 15:43:10.596700 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_e6c2c683-4358-47d5-b5cd-e97c588b965e/elasticsearch/0.log" Dec 11 15:43:10 crc kubenswrapper[4723]: I1211 15:43:10.880744 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-pvfdq_d86b58d7-d8ba-4d6b-8b06-3013e693f293/prometheus-webhook-snmp/0.log" Dec 11 15:43:11 crc kubenswrapper[4723]: I1211 15:43:11.110188 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_667fd4db-55d2-4e83-8f5b-73ffd8051429/alertmanager/0.log" Dec 11 15:43:13 crc kubenswrapper[4723]: I1211 15:43:13.745325 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:43:13 crc kubenswrapper[4723]: I1211 15:43:13.745955 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:43:24 crc kubenswrapper[4723]: I1211 15:43:24.559704 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-74554d9685-hm62n_c53c8e29-2540-4c3c-834d-365514555685/operator/0.log" Dec 11 15:43:27 crc kubenswrapper[4723]: I1211 15:43:27.293108 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-76d45b648-v762h_567782db-f350-4118-aa1b-c7008a51a4c6/operator/0.log" Dec 11 15:43:27 crc kubenswrapper[4723]: I1211 15:43:27.565718 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_b217a81f-1bb9-41fa-89af-3304819123d9/qdr/0.log" Dec 11 15:43:43 crc kubenswrapper[4723]: I1211 15:43:43.744954 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:43:43 crc kubenswrapper[4723]: I1211 15:43:43.745555 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:43:43 crc kubenswrapper[4723]: I1211 15:43:43.745609 4723 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:43:43 crc kubenswrapper[4723]: I1211 15:43:43.746336 4723 kuberuntime_manager.go:1027] "Message for 
Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e9435255d18f49a0db14745f8fb7e304867ac082eb3666ed82871694760ba136"} pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 15:43:43 crc kubenswrapper[4723]: I1211 15:43:43.746399 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" containerID="cri-o://e9435255d18f49a0db14745f8fb7e304867ac082eb3666ed82871694760ba136" gracePeriod=600 Dec 11 15:43:44 crc kubenswrapper[4723]: I1211 15:43:44.146028 4723 generic.go:334] "Generic (PLEG): container finished" podID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerID="e9435255d18f49a0db14745f8fb7e304867ac082eb3666ed82871694760ba136" exitCode=0 Dec 11 15:43:44 crc kubenswrapper[4723]: I1211 15:43:44.146067 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerDied","Data":"e9435255d18f49a0db14745f8fb7e304867ac082eb3666ed82871694760ba136"} Dec 11 15:43:44 crc kubenswrapper[4723]: I1211 15:43:44.146366 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"1cd76c0de71f08b2c0733562cf2b8f05fc5bbc38615d3c43238413dde61b1430"} Dec 11 15:43:44 crc kubenswrapper[4723]: I1211 15:43:44.146393 4723 scope.go:117] "RemoveContainer" containerID="8b706cc1ae9efdd0fd2da504cb2d8b7ce339d544b3cc83f32508f322f998dd8e" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.884721 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jjjgh/must-gather-5h5tb"] Dec 11 15:43:51 crc kubenswrapper[4723]: E1211 15:43:51.885858 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerName="smoketest-ceilometer" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.885877 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerName="smoketest-ceilometer" Dec 11 15:43:51 crc kubenswrapper[4723]: E1211 15:43:51.885908 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerName="smoketest-collectd" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.885917 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerName="smoketest-collectd" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.886120 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerName="smoketest-collectd" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.886138 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed6d5b79-ff0c-4df8-9887-d1f691e9385a" containerName="smoketest-ceilometer" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.887025 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.890266 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jjjgh"/"openshift-service-ca.crt" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.890359 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jjjgh"/"default-dockercfg-sjvfc" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.890438 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jjjgh"/"kube-root-ca.crt" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.902379 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jjjgh/must-gather-5h5tb"] Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.902911 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/167bd41f-f0a5-4750-84eb-a98390d93d67-must-gather-output\") pod \"must-gather-5h5tb\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:51 crc kubenswrapper[4723]: I1211 15:43:51.902958 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwbps\" (UniqueName: \"kubernetes.io/projected/167bd41f-f0a5-4750-84eb-a98390d93d67-kube-api-access-lwbps\") pod \"must-gather-5h5tb\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:52 crc kubenswrapper[4723]: I1211 15:43:52.004005 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/167bd41f-f0a5-4750-84eb-a98390d93d67-must-gather-output\") pod \"must-gather-5h5tb\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:52 crc kubenswrapper[4723]: I1211 15:43:52.004064 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwbps\" (UniqueName: \"kubernetes.io/projected/167bd41f-f0a5-4750-84eb-a98390d93d67-kube-api-access-lwbps\") pod \"must-gather-5h5tb\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:52 crc kubenswrapper[4723]: I1211 15:43:52.004921 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/167bd41f-f0a5-4750-84eb-a98390d93d67-must-gather-output\") pod \"must-gather-5h5tb\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:52 crc kubenswrapper[4723]: I1211 15:43:52.028562 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwbps\" (UniqueName: \"kubernetes.io/projected/167bd41f-f0a5-4750-84eb-a98390d93d67-kube-api-access-lwbps\") pod \"must-gather-5h5tb\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:52 crc kubenswrapper[4723]: I1211 15:43:52.211382 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:43:52 crc kubenswrapper[4723]: I1211 15:43:52.869226 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jjjgh/must-gather-5h5tb"] Dec 11 15:43:53 crc kubenswrapper[4723]: I1211 15:43:53.228816 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" event={"ID":"167bd41f-f0a5-4750-84eb-a98390d93d67","Type":"ContainerStarted","Data":"16f69ff6370e5863e093e2e3464aef6ffed4b85739b654cd595da6740302815b"} Dec 11 15:44:03 crc kubenswrapper[4723]: I1211 15:44:03.344924 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" event={"ID":"167bd41f-f0a5-4750-84eb-a98390d93d67","Type":"ContainerStarted","Data":"bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0"} Dec 11 15:44:03 crc kubenswrapper[4723]: I1211 15:44:03.345595 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" event={"ID":"167bd41f-f0a5-4750-84eb-a98390d93d67","Type":"ContainerStarted","Data":"fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c"} Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.627365 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" podStartSLOduration=43.000055812 podStartE2EDuration="52.62734487s" podCreationTimestamp="2025-12-11 15:43:51 +0000 UTC" firstStartedPulling="2025-12-11 15:43:52.867789636 +0000 UTC m=+1243.642023061" lastFinishedPulling="2025-12-11 15:44:02.495078684 +0000 UTC m=+1253.269312119" observedRunningTime="2025-12-11 15:44:03.366122904 +0000 UTC m=+1254.140356339" watchObservedRunningTime="2025-12-11 15:44:43.62734487 +0000 UTC m=+1294.401578315" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.632848 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-md8tw"] Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.634285 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.653601 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-md8tw"] Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.670808 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-catalog-content\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.670871 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k5vm\" (UniqueName: \"kubernetes.io/projected/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-kube-api-access-4k5vm\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.670899 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-utilities\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.772827 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-catalog-content\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.772899 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k5vm\" (UniqueName: \"kubernetes.io/projected/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-kube-api-access-4k5vm\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.772959 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-utilities\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.773524 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-catalog-content\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.773549 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-utilities\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.803591 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4k5vm\" (UniqueName: \"kubernetes.io/projected/ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231-kube-api-access-4k5vm\") pod \"redhat-operators-md8tw\" (UID: \"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231\") " pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:43 crc kubenswrapper[4723]: I1211 15:44:43.951513 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:44:44 crc kubenswrapper[4723]: I1211 15:44:44.216887 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-md8tw"] Dec 11 15:44:44 crc kubenswrapper[4723]: I1211 15:44:44.612393 4723 generic.go:334] "Generic (PLEG): container finished" podID="ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231" containerID="a0e2ab7f3a5e9b7126eb9bb9948612dd2f8f3868ffa9c4501b696bae99dbdf3a" exitCode=0 Dec 11 15:44:44 crc kubenswrapper[4723]: I1211 15:44:44.612506 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md8tw" event={"ID":"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231","Type":"ContainerDied","Data":"a0e2ab7f3a5e9b7126eb9bb9948612dd2f8f3868ffa9c4501b696bae99dbdf3a"} Dec 11 15:44:44 crc kubenswrapper[4723]: I1211 15:44:44.612752 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md8tw" event={"ID":"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231","Type":"ContainerStarted","Data":"7d5804f1afc2951b2b6431c6434a537eed2189e55a2b5d099be7d37e947d2091"} Dec 11 15:44:44 crc kubenswrapper[4723]: I1211 15:44:44.614000 4723 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 15:44:44 crc kubenswrapper[4723]: I1211 15:44:44.898096 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-kv42r_7b651358-a4e6-40b5-a8db-f4108332e022/control-plane-machine-set-operator/0.log" Dec 11 15:44:45 crc kubenswrapper[4723]: I1211 15:44:45.108512 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-ndhbx_810dc990-b95f-403e-ab68-5c65f34396bf/kube-rbac-proxy/0.log" Dec 11 15:44:45 crc kubenswrapper[4723]: I1211 15:44:45.160505 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-ndhbx_810dc990-b95f-403e-ab68-5c65f34396bf/machine-api-operator/0.log" Dec 11 15:44:57 crc kubenswrapper[4723]: I1211 15:44:57.122059 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-s9w7v_b5e4ec1c-aace-40d8-9e95-3f0a2ab9fdae/cert-manager-controller/0.log" Dec 11 15:44:57 crc kubenswrapper[4723]: I1211 15:44:57.262667 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-tc9gs_8648dc98-8bab-4d61-8669-36126c074dae/cert-manager-cainjector/0.log" Dec 11 15:44:57 crc kubenswrapper[4723]: I1211 15:44:57.371394 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-8jg8v_157016c0-6dd9-40c5-a132-0efa815cc2a2/cert-manager-webhook/0.log" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.140594 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs"] Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.141740 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.143823 4723 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.144032 4723 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.158748 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs"] Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.198379 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2hbl\" (UniqueName: \"kubernetes.io/projected/33b83e2a-1482-4497-ad4c-352205723b7d-kube-api-access-t2hbl\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.198501 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b83e2a-1482-4497-ad4c-352205723b7d-config-volume\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.198577 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b83e2a-1482-4497-ad4c-352205723b7d-secret-volume\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.300859 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b83e2a-1482-4497-ad4c-352205723b7d-secret-volume\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.301041 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2hbl\" (UniqueName: \"kubernetes.io/projected/33b83e2a-1482-4497-ad4c-352205723b7d-kube-api-access-t2hbl\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.301099 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b83e2a-1482-4497-ad4c-352205723b7d-config-volume\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.302094 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b83e2a-1482-4497-ad4c-352205723b7d-config-volume\") pod 
\"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.306670 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b83e2a-1482-4497-ad4c-352205723b7d-secret-volume\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.318681 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2hbl\" (UniqueName: \"kubernetes.io/projected/33b83e2a-1482-4497-ad4c-352205723b7d-kube-api-access-t2hbl\") pod \"collect-profiles-29424465-bfqfs\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:00 crc kubenswrapper[4723]: I1211 15:45:00.458321 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.170672 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x9t9d"] Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.171897 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.184352 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x9t9d"] Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.214462 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-catalog-content\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.214607 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-utilities\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.214660 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzmrg\" (UniqueName: \"kubernetes.io/projected/733562a4-b7d8-4125-a5a0-ff717aed961f-kube-api-access-tzmrg\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.316502 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzmrg\" (UniqueName: \"kubernetes.io/projected/733562a4-b7d8-4125-a5a0-ff717aed961f-kube-api-access-tzmrg\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.316596 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-catalog-content\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.316709 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-utilities\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.317103 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-catalog-content\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.317221 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-utilities\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.334609 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzmrg\" (UniqueName: \"kubernetes.io/projected/733562a4-b7d8-4125-a5a0-ff717aed961f-kube-api-access-tzmrg\") pod \"certified-operators-x9t9d\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:01 crc kubenswrapper[4723]: I1211 15:45:01.488540 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:04 crc kubenswrapper[4723]: I1211 15:45:04.360490 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs"] Dec 11 15:45:04 crc kubenswrapper[4723]: I1211 15:45:04.493295 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x9t9d"] Dec 11 15:45:04 crc kubenswrapper[4723]: W1211 15:45:04.508911 4723 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod733562a4_b7d8_4125_a5a0_ff717aed961f.slice/crio-3eba405784f4ebd4989a725535c3c2b591299d7818ad129ff56898a8392fcf25 WatchSource:0}: Error finding container 3eba405784f4ebd4989a725535c3c2b591299d7818ad129ff56898a8392fcf25: Status 404 returned error can't find the container with id 3eba405784f4ebd4989a725535c3c2b591299d7818ad129ff56898a8392fcf25 Dec 11 15:45:04 crc kubenswrapper[4723]: I1211 15:45:04.763353 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" event={"ID":"33b83e2a-1482-4497-ad4c-352205723b7d","Type":"ContainerStarted","Data":"efad7fb59d48baf213f159d77454af364dc4fc9846680dcfaccc6049ca38cbc1"} Dec 11 15:45:04 crc kubenswrapper[4723]: I1211 15:45:04.764543 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x9t9d" event={"ID":"733562a4-b7d8-4125-a5a0-ff717aed961f","Type":"ContainerStarted","Data":"3eba405784f4ebd4989a725535c3c2b591299d7818ad129ff56898a8392fcf25"} Dec 11 15:45:05 crc kubenswrapper[4723]: E1211 15:45:05.211025 4723 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 15:45:05 crc kubenswrapper[4723]: E1211 15:45:05.211202 4723 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4k5vm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-md8tw_openshift-marketplace(ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 15:45:05 crc kubenswrapper[4723]: E1211 15:45:05.212381 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-md8tw" podUID="ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231" Dec 11 15:45:05 crc kubenswrapper[4723]: I1211 15:45:05.772489 4723 generic.go:334] "Generic (PLEG): container finished" podID="33b83e2a-1482-4497-ad4c-352205723b7d" containerID="afd9cadbc08c5b7401769716098d74d764b1f16fae357b6faa58125f045c3bd8" exitCode=0 Dec 11 15:45:05 crc kubenswrapper[4723]: I1211 15:45:05.772665 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" event={"ID":"33b83e2a-1482-4497-ad4c-352205723b7d","Type":"ContainerDied","Data":"afd9cadbc08c5b7401769716098d74d764b1f16fae357b6faa58125f045c3bd8"} Dec 11 15:45:05 crc kubenswrapper[4723]: I1211 15:45:05.774696 4723 generic.go:334] "Generic (PLEG): container finished" podID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerID="4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6" exitCode=0 Dec 11 15:45:05 crc kubenswrapper[4723]: I1211 15:45:05.775447 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x9t9d" event={"ID":"733562a4-b7d8-4125-a5a0-ff717aed961f","Type":"ContainerDied","Data":"4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6"} Dec 11 15:45:05 crc kubenswrapper[4723]: E1211 15:45:05.776625 4723 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-md8tw" podUID="ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231" Dec 11 15:45:07 crc 
kubenswrapper[4723]: I1211 15:45:07.057275 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.107439 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2hbl\" (UniqueName: \"kubernetes.io/projected/33b83e2a-1482-4497-ad4c-352205723b7d-kube-api-access-t2hbl\") pod \"33b83e2a-1482-4497-ad4c-352205723b7d\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.107555 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b83e2a-1482-4497-ad4c-352205723b7d-config-volume\") pod \"33b83e2a-1482-4497-ad4c-352205723b7d\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.107621 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b83e2a-1482-4497-ad4c-352205723b7d-secret-volume\") pod \"33b83e2a-1482-4497-ad4c-352205723b7d\" (UID: \"33b83e2a-1482-4497-ad4c-352205723b7d\") " Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.108578 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33b83e2a-1482-4497-ad4c-352205723b7d-config-volume" (OuterVolumeSpecName: "config-volume") pod "33b83e2a-1482-4497-ad4c-352205723b7d" (UID: "33b83e2a-1482-4497-ad4c-352205723b7d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.113727 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33b83e2a-1482-4497-ad4c-352205723b7d-kube-api-access-t2hbl" (OuterVolumeSpecName: "kube-api-access-t2hbl") pod "33b83e2a-1482-4497-ad4c-352205723b7d" (UID: "33b83e2a-1482-4497-ad4c-352205723b7d"). InnerVolumeSpecName "kube-api-access-t2hbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.114418 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33b83e2a-1482-4497-ad4c-352205723b7d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "33b83e2a-1482-4497-ad4c-352205723b7d" (UID: "33b83e2a-1482-4497-ad4c-352205723b7d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.209104 4723 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b83e2a-1482-4497-ad4c-352205723b7d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.209139 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2hbl\" (UniqueName: \"kubernetes.io/projected/33b83e2a-1482-4497-ad4c-352205723b7d-kube-api-access-t2hbl\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.209150 4723 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b83e2a-1482-4497-ad4c-352205723b7d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.796534 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" event={"ID":"33b83e2a-1482-4497-ad4c-352205723b7d","Type":"ContainerDied","Data":"efad7fb59d48baf213f159d77454af364dc4fc9846680dcfaccc6049ca38cbc1"} Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.796940 4723 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efad7fb59d48baf213f159d77454af364dc4fc9846680dcfaccc6049ca38cbc1" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.796577 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424465-bfqfs" Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.798930 4723 generic.go:334] "Generic (PLEG): container finished" podID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerID="1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3" exitCode=0 Dec 11 15:45:07 crc kubenswrapper[4723]: I1211 15:45:07.798960 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x9t9d" event={"ID":"733562a4-b7d8-4125-a5a0-ff717aed961f","Type":"ContainerDied","Data":"1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3"} Dec 11 15:45:08 crc kubenswrapper[4723]: I1211 15:45:08.807919 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x9t9d" event={"ID":"733562a4-b7d8-4125-a5a0-ff717aed961f","Type":"ContainerStarted","Data":"292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94"} Dec 11 15:45:08 crc kubenswrapper[4723]: I1211 15:45:08.829167 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x9t9d" podStartSLOduration=5.365428698 podStartE2EDuration="7.829148609s" podCreationTimestamp="2025-12-11 15:45:01 +0000 UTC" firstStartedPulling="2025-12-11 15:45:05.776305833 +0000 UTC m=+1316.550539258" lastFinishedPulling="2025-12-11 15:45:08.240025734 +0000 UTC m=+1319.014259169" observedRunningTime="2025-12-11 15:45:08.826517889 +0000 UTC m=+1319.600751324" watchObservedRunningTime="2025-12-11 15:45:08.829148609 +0000 UTC m=+1319.603382044" Dec 11 15:45:11 crc kubenswrapper[4723]: I1211 15:45:11.489435 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:11 crc kubenswrapper[4723]: I1211 15:45:11.491042 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:11 crc kubenswrapper[4723]: I1211 15:45:11.537741 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:12 crc kubenswrapper[4723]: I1211 15:45:12.486218 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_c62e47fd-6638-4d88-a884-a0b2bef6b59f/util/0.log" Dec 11 15:45:12 crc kubenswrapper[4723]: I1211 15:45:12.665570 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_c62e47fd-6638-4d88-a884-a0b2bef6b59f/util/0.log" Dec 11 15:45:12 crc kubenswrapper[4723]: I1211 15:45:12.698226 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_c62e47fd-6638-4d88-a884-a0b2bef6b59f/pull/0.log" Dec 11 15:45:12 crc kubenswrapper[4723]: I1211 15:45:12.717452 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_c62e47fd-6638-4d88-a884-a0b2bef6b59f/pull/0.log" Dec 11 15:45:12 crc kubenswrapper[4723]: I1211 15:45:12.912720 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_c62e47fd-6638-4d88-a884-a0b2bef6b59f/util/0.log" Dec 11 15:45:12 crc kubenswrapper[4723]: I1211 15:45:12.979918 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_c62e47fd-6638-4d88-a884-a0b2bef6b59f/pull/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.233651 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9_f441b93b-02ba-484f-90a6-c0b1eb50fade/util/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.234734 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931alhj5z_c62e47fd-6638-4d88-a884-a0b2bef6b59f/extract/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.268988 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9_f441b93b-02ba-484f-90a6-c0b1eb50fade/pull/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.275155 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9_f441b93b-02ba-484f-90a6-c0b1eb50fade/util/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.403876 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9_f441b93b-02ba-484f-90a6-c0b1eb50fade/pull/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.561530 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9_f441b93b-02ba-484f-90a6-c0b1eb50fade/pull/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.578690 4723 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9_f441b93b-02ba-484f-90a6-c0b1eb50fade/util/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.600540 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210jrgs9_f441b93b-02ba-484f-90a6-c0b1eb50fade/extract/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.773300 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt_6eb9c879-45c4-4082-b6ff-fe25f5a82211/util/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.926497 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt_6eb9c879-45c4-4082-b6ff-fe25f5a82211/util/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.951013 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt_6eb9c879-45c4-4082-b6ff-fe25f5a82211/pull/0.log" Dec 11 15:45:13 crc kubenswrapper[4723]: I1211 15:45:13.968831 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt_6eb9c879-45c4-4082-b6ff-fe25f5a82211/pull/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.099657 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt_6eb9c879-45c4-4082-b6ff-fe25f5a82211/util/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.101538 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt_6eb9c879-45c4-4082-b6ff-fe25f5a82211/pull/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.144514 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fxsrgt_6eb9c879-45c4-4082-b6ff-fe25f5a82211/extract/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.247417 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz_90635432-311d-4b2c-8d71-bbd56f653a41/util/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.457064 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz_90635432-311d-4b2c-8d71-bbd56f653a41/util/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.493444 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz_90635432-311d-4b2c-8d71-bbd56f653a41/pull/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.493679 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz_90635432-311d-4b2c-8d71-bbd56f653a41/pull/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.628372 4723 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz_90635432-311d-4b2c-8d71-bbd56f653a41/util/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.646220 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz_90635432-311d-4b2c-8d71-bbd56f653a41/pull/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.654011 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ep7wpz_90635432-311d-4b2c-8d71-bbd56f653a41/extract/0.log" Dec 11 15:45:14 crc kubenswrapper[4723]: I1211 15:45:14.807262 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-42q2h_f0c65aaf-bb95-4a84-8b09-ec8c41da00f5/extract-utilities/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.017622 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-42q2h_f0c65aaf-bb95-4a84-8b09-ec8c41da00f5/extract-content/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.020981 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-42q2h_f0c65aaf-bb95-4a84-8b09-ec8c41da00f5/extract-utilities/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.031099 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-42q2h_f0c65aaf-bb95-4a84-8b09-ec8c41da00f5/extract-content/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.175909 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-42q2h_f0c65aaf-bb95-4a84-8b09-ec8c41da00f5/extract-utilities/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.176068 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-42q2h_f0c65aaf-bb95-4a84-8b09-ec8c41da00f5/extract-content/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.427390 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x9t9d_733562a4-b7d8-4125-a5a0-ff717aed961f/extract-utilities/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.592456 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-42q2h_f0c65aaf-bb95-4a84-8b09-ec8c41da00f5/registry-server/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.622469 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x9t9d_733562a4-b7d8-4125-a5a0-ff717aed961f/extract-utilities/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.669638 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x9t9d_733562a4-b7d8-4125-a5a0-ff717aed961f/extract-content/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.709313 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x9t9d_733562a4-b7d8-4125-a5a0-ff717aed961f/extract-content/0.log" Dec 11 15:45:15 crc kubenswrapper[4723]: I1211 15:45:15.924088 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x9t9d_733562a4-b7d8-4125-a5a0-ff717aed961f/extract-utilities/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: 
I1211 15:45:16.000302 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x9t9d_733562a4-b7d8-4125-a5a0-ff717aed961f/registry-server/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.030950 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x9t9d_733562a4-b7d8-4125-a5a0-ff717aed961f/extract-content/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.142632 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dh49d_60e763b8-dc9d-4e68-85d5-de6e980b7345/extract-utilities/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.323451 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dh49d_60e763b8-dc9d-4e68-85d5-de6e980b7345/extract-utilities/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.340449 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dh49d_60e763b8-dc9d-4e68-85d5-de6e980b7345/extract-content/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.344771 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dh49d_60e763b8-dc9d-4e68-85d5-de6e980b7345/extract-content/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.477309 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dh49d_60e763b8-dc9d-4e68-85d5-de6e980b7345/extract-utilities/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.501192 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dh49d_60e763b8-dc9d-4e68-85d5-de6e980b7345/extract-content/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.518011 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-pkdkv_4d28cf96-8fcf-4934-96bb-36f0482583aa/marketplace-operator/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.684088 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dh49d_60e763b8-dc9d-4e68-85d5-de6e980b7345/registry-server/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.726669 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9d67_0eb578a6-70ec-41a6-8823-da3f87d6f591/extract-utilities/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.904004 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9d67_0eb578a6-70ec-41a6-8823-da3f87d6f591/extract-content/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.905500 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9d67_0eb578a6-70ec-41a6-8823-da3f87d6f591/extract-content/0.log" Dec 11 15:45:16 crc kubenswrapper[4723]: I1211 15:45:16.905571 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9d67_0eb578a6-70ec-41a6-8823-da3f87d6f591/extract-utilities/0.log" Dec 11 15:45:17 crc kubenswrapper[4723]: I1211 15:45:17.063604 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9d67_0eb578a6-70ec-41a6-8823-da3f87d6f591/extract-utilities/0.log" Dec 11 15:45:17 crc kubenswrapper[4723]: I1211 
15:45:17.095344 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9d67_0eb578a6-70ec-41a6-8823-da3f87d6f591/extract-content/0.log" Dec 11 15:45:17 crc kubenswrapper[4723]: I1211 15:45:17.181742 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-md8tw_ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231/extract-utilities/0.log" Dec 11 15:45:17 crc kubenswrapper[4723]: I1211 15:45:17.328954 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-b9d67_0eb578a6-70ec-41a6-8823-da3f87d6f591/registry-server/0.log" Dec 11 15:45:17 crc kubenswrapper[4723]: I1211 15:45:17.418029 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-md8tw_ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231/extract-utilities/0.log" Dec 11 15:45:17 crc kubenswrapper[4723]: I1211 15:45:17.574606 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-md8tw_ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231/extract-utilities/0.log" Dec 11 15:45:21 crc kubenswrapper[4723]: I1211 15:45:21.537658 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:21 crc kubenswrapper[4723]: I1211 15:45:21.592203 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x9t9d"] Dec 11 15:45:21 crc kubenswrapper[4723]: I1211 15:45:21.955148 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md8tw" event={"ID":"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231","Type":"ContainerStarted","Data":"7b47f913e38dc8f2475235fb07768229a3640c5b7300db80fe656a7791ac977d"} Dec 11 15:45:21 crc kubenswrapper[4723]: I1211 15:45:21.955306 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x9t9d" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="registry-server" containerID="cri-o://292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94" gracePeriod=2 Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.629250 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.813073 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzmrg\" (UniqueName: \"kubernetes.io/projected/733562a4-b7d8-4125-a5a0-ff717aed961f-kube-api-access-tzmrg\") pod \"733562a4-b7d8-4125-a5a0-ff717aed961f\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.813240 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-utilities\") pod \"733562a4-b7d8-4125-a5a0-ff717aed961f\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.814123 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-catalog-content\") pod \"733562a4-b7d8-4125-a5a0-ff717aed961f\" (UID: \"733562a4-b7d8-4125-a5a0-ff717aed961f\") " Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.814053 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-utilities" (OuterVolumeSpecName: "utilities") pod "733562a4-b7d8-4125-a5a0-ff717aed961f" (UID: "733562a4-b7d8-4125-a5a0-ff717aed961f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.816425 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.823335 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/733562a4-b7d8-4125-a5a0-ff717aed961f-kube-api-access-tzmrg" (OuterVolumeSpecName: "kube-api-access-tzmrg") pod "733562a4-b7d8-4125-a5a0-ff717aed961f" (UID: "733562a4-b7d8-4125-a5a0-ff717aed961f"). InnerVolumeSpecName "kube-api-access-tzmrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.869613 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "733562a4-b7d8-4125-a5a0-ff717aed961f" (UID: "733562a4-b7d8-4125-a5a0-ff717aed961f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.917703 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzmrg\" (UniqueName: \"kubernetes.io/projected/733562a4-b7d8-4125-a5a0-ff717aed961f-kube-api-access-tzmrg\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.917748 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/733562a4-b7d8-4125-a5a0-ff717aed961f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.964283 4723 generic.go:334] "Generic (PLEG): container finished" podID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerID="292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94" exitCode=0 Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.964325 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x9t9d" event={"ID":"733562a4-b7d8-4125-a5a0-ff717aed961f","Type":"ContainerDied","Data":"292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94"} Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.964402 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x9t9d" event={"ID":"733562a4-b7d8-4125-a5a0-ff717aed961f","Type":"ContainerDied","Data":"3eba405784f4ebd4989a725535c3c2b591299d7818ad129ff56898a8392fcf25"} Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.964424 4723 scope.go:117] "RemoveContainer" containerID="292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.964892 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x9t9d" Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.966494 4723 generic.go:334] "Generic (PLEG): container finished" podID="ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231" containerID="7b47f913e38dc8f2475235fb07768229a3640c5b7300db80fe656a7791ac977d" exitCode=0 Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.966520 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md8tw" event={"ID":"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231","Type":"ContainerDied","Data":"7b47f913e38dc8f2475235fb07768229a3640c5b7300db80fe656a7791ac977d"} Dec 11 15:45:22 crc kubenswrapper[4723]: I1211 15:45:22.986105 4723 scope.go:117] "RemoveContainer" containerID="1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.002076 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x9t9d"] Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.007186 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x9t9d"] Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.032625 4723 scope.go:117] "RemoveContainer" containerID="4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.050536 4723 scope.go:117] "RemoveContainer" containerID="292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94" Dec 11 15:45:23 crc kubenswrapper[4723]: E1211 15:45:23.052066 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94\": container with ID starting with 292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94 not found: ID does not exist" containerID="292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.052105 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94"} err="failed to get container status \"292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94\": rpc error: code = NotFound desc = could not find container \"292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94\": container with ID starting with 292052df97f8df53bb59ffa895a47f4b61d49107669a907ea2439d87a3458b94 not found: ID does not exist" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.052135 4723 scope.go:117] "RemoveContainer" containerID="1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3" Dec 11 15:45:23 crc kubenswrapper[4723]: E1211 15:45:23.052626 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3\": container with ID starting with 1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3 not found: ID does not exist" containerID="1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.052655 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3"} err="failed to get container status 
\"1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3\": rpc error: code = NotFound desc = could not find container \"1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3\": container with ID starting with 1f894d12f4c0df2a7eb52b0b95868d989cf7cc8e57383bbbc04862a0b92ce3c3 not found: ID does not exist" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.052668 4723 scope.go:117] "RemoveContainer" containerID="4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6" Dec 11 15:45:23 crc kubenswrapper[4723]: E1211 15:45:23.053151 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6\": container with ID starting with 4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6 not found: ID does not exist" containerID="4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.053192 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6"} err="failed to get container status \"4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6\": rpc error: code = NotFound desc = could not find container \"4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6\": container with ID starting with 4d12eb489e50f6b834e2de66de3e1aa7f94b43de717b1434ddb6a490f62ed2b6 not found: ID does not exist" Dec 11 15:45:23 crc kubenswrapper[4723]: I1211 15:45:23.556697 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" path="/var/lib/kubelet/pods/733562a4-b7d8-4125-a5a0-ff717aed961f/volumes" Dec 11 15:45:25 crc kubenswrapper[4723]: I1211 15:45:25.987762 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-md8tw" event={"ID":"ff0dcd4f-3b96-4faf-8b5c-78b0dbea3231","Type":"ContainerStarted","Data":"b2275a8f64a291d032140956a04f50afb2c4c926078ba9625fc84f9e4f738baa"} Dec 11 15:45:26 crc kubenswrapper[4723]: I1211 15:45:26.008193 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-md8tw" podStartSLOduration=3.152059501 podStartE2EDuration="43.008175633s" podCreationTimestamp="2025-12-11 15:44:43 +0000 UTC" firstStartedPulling="2025-12-11 15:44:44.61375974 +0000 UTC m=+1295.387993175" lastFinishedPulling="2025-12-11 15:45:24.469875872 +0000 UTC m=+1335.244109307" observedRunningTime="2025-12-11 15:45:26.006738335 +0000 UTC m=+1336.780971800" watchObservedRunningTime="2025-12-11 15:45:26.008175633 +0000 UTC m=+1336.782409068" Dec 11 15:45:28 crc kubenswrapper[4723]: I1211 15:45:28.750847 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-vqsrg_8d747017-ba68-4bba-932d-30c1b7f21c3e/prometheus-operator/0.log" Dec 11 15:45:28 crc kubenswrapper[4723]: I1211 15:45:28.911072 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-dd5f85999-2tnjl_2dbda7da-f5c3-43c2-92a7-397c48293f0b/prometheus-operator-admission-webhook/0.log" Dec 11 15:45:28 crc kubenswrapper[4723]: I1211 15:45:28.976593 4723 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-dd5f85999-sskx2_248a8e31-62c3-4bc6-81d7-8d603174184f/prometheus-operator-admission-webhook/0.log" Dec 11 15:45:29 crc kubenswrapper[4723]: I1211 15:45:29.169489 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-c4b5b_28e7bf38-168c-4a52-8e29-5036b9adc3ab/operator/0.log" Dec 11 15:45:29 crc kubenswrapper[4723]: I1211 15:45:29.192594 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-7j5zh_8badb2a5-9456-4325-89d3-68f8db885c95/perses-operator/0.log" Dec 11 15:45:33 crc kubenswrapper[4723]: I1211 15:45:33.951996 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:45:33 crc kubenswrapper[4723]: I1211 15:45:33.952514 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:45:34 crc kubenswrapper[4723]: I1211 15:45:34.000019 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:45:34 crc kubenswrapper[4723]: I1211 15:45:34.083869 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-md8tw" Dec 11 15:45:34 crc kubenswrapper[4723]: I1211 15:45:34.159710 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-md8tw"] Dec 11 15:45:34 crc kubenswrapper[4723]: I1211 15:45:34.251872 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b9d67"] Dec 11 15:45:34 crc kubenswrapper[4723]: I1211 15:45:34.252190 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b9d67" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="registry-server" containerID="cri-o://2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322" gracePeriod=2 Dec 11 15:45:36 crc kubenswrapper[4723]: E1211 15:45:36.669575 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322 is running failed: container process not found" containerID="2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:45:36 crc kubenswrapper[4723]: E1211 15:45:36.670141 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322 is running failed: container process not found" containerID="2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:45:36 crc kubenswrapper[4723]: E1211 15:45:36.670484 4723 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322 is running failed: container process not found" containerID="2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 15:45:36 crc kubenswrapper[4723]: E1211 15:45:36.670550 4723 prober.go:104] 
"Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-b9d67" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="registry-server" Dec 11 15:45:42 crc kubenswrapper[4723]: I1211 15:45:42.805055 4723 generic.go:334] "Generic (PLEG): container finished" podID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerID="2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322" exitCode=0 Dec 11 15:45:42 crc kubenswrapper[4723]: I1211 15:45:42.805523 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9d67" event={"ID":"0eb578a6-70ec-41a6-8823-da3f87d6f591","Type":"ContainerDied","Data":"2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322"} Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.096708 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.218959 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-utilities\") pod \"0eb578a6-70ec-41a6-8823-da3f87d6f591\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.219127 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-catalog-content\") pod \"0eb578a6-70ec-41a6-8823-da3f87d6f591\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.219300 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8zwj\" (UniqueName: \"kubernetes.io/projected/0eb578a6-70ec-41a6-8823-da3f87d6f591-kube-api-access-x8zwj\") pod \"0eb578a6-70ec-41a6-8823-da3f87d6f591\" (UID: \"0eb578a6-70ec-41a6-8823-da3f87d6f591\") " Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.219625 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-utilities" (OuterVolumeSpecName: "utilities") pod "0eb578a6-70ec-41a6-8823-da3f87d6f591" (UID: "0eb578a6-70ec-41a6-8823-da3f87d6f591"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.220487 4723 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.224842 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb578a6-70ec-41a6-8823-da3f87d6f591-kube-api-access-x8zwj" (OuterVolumeSpecName: "kube-api-access-x8zwj") pod "0eb578a6-70ec-41a6-8823-da3f87d6f591" (UID: "0eb578a6-70ec-41a6-8823-da3f87d6f591"). InnerVolumeSpecName "kube-api-access-x8zwj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.320901 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0eb578a6-70ec-41a6-8823-da3f87d6f591" (UID: "0eb578a6-70ec-41a6-8823-da3f87d6f591"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.321626 4723 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb578a6-70ec-41a6-8823-da3f87d6f591-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.321660 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8zwj\" (UniqueName: \"kubernetes.io/projected/0eb578a6-70ec-41a6-8823-da3f87d6f591-kube-api-access-x8zwj\") on node \"crc\" DevicePath \"\"" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.822052 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9d67" event={"ID":"0eb578a6-70ec-41a6-8823-da3f87d6f591","Type":"ContainerDied","Data":"f2b93c747adff0341c3cbc8fc9ef29b4eae8df85cb9e4788334b2f4189717f86"} Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.822464 4723 scope.go:117] "RemoveContainer" containerID="2ad0023dac39d3091461feb40881c42b6cd241f1f7b9a33139564afb755d4322" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.822665 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9d67" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.873430 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b9d67"] Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.873702 4723 scope.go:117] "RemoveContainer" containerID="374a6acd089f87060fc7dc23f4e07257d4e56c8c6d0dc818bf14fd6265b7bdf9" Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.882140 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b9d67"] Dec 11 15:45:44 crc kubenswrapper[4723]: I1211 15:45:44.898181 4723 scope.go:117] "RemoveContainer" containerID="933dd92ac674cff87f146b1fb048912aab672d4ff65cbc17208ebb6da5b0dfbd" Dec 11 15:45:45 crc kubenswrapper[4723]: I1211 15:45:45.557908 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" path="/var/lib/kubelet/pods/0eb578a6-70ec-41a6-8823-da3f87d6f591/volumes" Dec 11 15:46:13 crc kubenswrapper[4723]: I1211 15:46:13.745862 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:46:13 crc kubenswrapper[4723]: I1211 15:46:13.746564 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:46:19 crc kubenswrapper[4723]: I1211 15:46:19.068207 4723 generic.go:334] "Generic (PLEG): container 
finished" podID="167bd41f-f0a5-4750-84eb-a98390d93d67" containerID="fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c" exitCode=0 Dec 11 15:46:19 crc kubenswrapper[4723]: I1211 15:46:19.068297 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" event={"ID":"167bd41f-f0a5-4750-84eb-a98390d93d67","Type":"ContainerDied","Data":"fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c"} Dec 11 15:46:19 crc kubenswrapper[4723]: I1211 15:46:19.069439 4723 scope.go:117] "RemoveContainer" containerID="fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c" Dec 11 15:46:19 crc kubenswrapper[4723]: I1211 15:46:19.655130 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jjjgh_must-gather-5h5tb_167bd41f-f0a5-4750-84eb-a98390d93d67/gather/0.log" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.934421 4723 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-tqkv6"] Dec 11 15:46:24 crc kubenswrapper[4723]: E1211 15:46:24.935202 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="registry-server" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935220 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="registry-server" Dec 11 15:46:24 crc kubenswrapper[4723]: E1211 15:46:24.935232 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="extract-utilities" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935239 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="extract-utilities" Dec 11 15:46:24 crc kubenswrapper[4723]: E1211 15:46:24.935251 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="extract-content" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935259 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="extract-content" Dec 11 15:46:24 crc kubenswrapper[4723]: E1211 15:46:24.935269 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33b83e2a-1482-4497-ad4c-352205723b7d" containerName="collect-profiles" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935277 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="33b83e2a-1482-4497-ad4c-352205723b7d" containerName="collect-profiles" Dec 11 15:46:24 crc kubenswrapper[4723]: E1211 15:46:24.935297 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="extract-utilities" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935305 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="extract-utilities" Dec 11 15:46:24 crc kubenswrapper[4723]: E1211 15:46:24.935317 4723 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="registry-server" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935326 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="registry-server" Dec 11 15:46:24 crc kubenswrapper[4723]: E1211 15:46:24.935343 4723 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="extract-content" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935348 4723 state_mem.go:107] "Deleted CPUSet assignment" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="extract-content" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935471 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="733562a4-b7d8-4125-a5a0-ff717aed961f" containerName="registry-server" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935482 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb578a6-70ec-41a6-8823-da3f87d6f591" containerName="registry-server" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.935491 4723 memory_manager.go:354] "RemoveStaleState removing state" podUID="33b83e2a-1482-4497-ad4c-352205723b7d" containerName="collect-profiles" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.936087 4723 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:24 crc kubenswrapper[4723]: I1211 15:46:24.943727 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-tqkv6"] Dec 11 15:46:25 crc kubenswrapper[4723]: I1211 15:46:25.015531 4723 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsqzh\" (UniqueName: \"kubernetes.io/projected/21dd88a9-be78-4e05-93b6-3ac3c1ac9af9-kube-api-access-tsqzh\") pod \"infrawatch-operators-tqkv6\" (UID: \"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9\") " pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:25 crc kubenswrapper[4723]: I1211 15:46:25.116716 4723 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsqzh\" (UniqueName: \"kubernetes.io/projected/21dd88a9-be78-4e05-93b6-3ac3c1ac9af9-kube-api-access-tsqzh\") pod \"infrawatch-operators-tqkv6\" (UID: \"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9\") " pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:25 crc kubenswrapper[4723]: I1211 15:46:25.134648 4723 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsqzh\" (UniqueName: \"kubernetes.io/projected/21dd88a9-be78-4e05-93b6-3ac3c1ac9af9-kube-api-access-tsqzh\") pod \"infrawatch-operators-tqkv6\" (UID: \"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9\") " pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:25 crc kubenswrapper[4723]: I1211 15:46:25.275082 4723 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:25 crc kubenswrapper[4723]: I1211 15:46:25.494767 4723 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-tqkv6"] Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.120173 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tqkv6" event={"ID":"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9","Type":"ContainerStarted","Data":"d8b236c59c8e0580b77e5a3f89222d7a5af5030ed7d19c5bda16074cb77055ec"} Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.393476 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jjjgh/must-gather-5h5tb"] Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.394082 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" podUID="167bd41f-f0a5-4750-84eb-a98390d93d67" containerName="copy" containerID="cri-o://bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0" gracePeriod=2 Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.400710 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jjjgh/must-gather-5h5tb"] Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.725882 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jjjgh_must-gather-5h5tb_167bd41f-f0a5-4750-84eb-a98390d93d67/copy/0.log" Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.726664 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.845025 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/167bd41f-f0a5-4750-84eb-a98390d93d67-must-gather-output\") pod \"167bd41f-f0a5-4750-84eb-a98390d93d67\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.845108 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwbps\" (UniqueName: \"kubernetes.io/projected/167bd41f-f0a5-4750-84eb-a98390d93d67-kube-api-access-lwbps\") pod \"167bd41f-f0a5-4750-84eb-a98390d93d67\" (UID: \"167bd41f-f0a5-4750-84eb-a98390d93d67\") " Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.851543 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/167bd41f-f0a5-4750-84eb-a98390d93d67-kube-api-access-lwbps" (OuterVolumeSpecName: "kube-api-access-lwbps") pod "167bd41f-f0a5-4750-84eb-a98390d93d67" (UID: "167bd41f-f0a5-4750-84eb-a98390d93d67"). InnerVolumeSpecName "kube-api-access-lwbps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.903809 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/167bd41f-f0a5-4750-84eb-a98390d93d67-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "167bd41f-f0a5-4750-84eb-a98390d93d67" (UID: "167bd41f-f0a5-4750-84eb-a98390d93d67"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.946379 4723 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/167bd41f-f0a5-4750-84eb-a98390d93d67-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 11 15:46:26 crc kubenswrapper[4723]: I1211 15:46:26.946686 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwbps\" (UniqueName: \"kubernetes.io/projected/167bd41f-f0a5-4750-84eb-a98390d93d67-kube-api-access-lwbps\") on node \"crc\" DevicePath \"\"" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.138441 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tqkv6" event={"ID":"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9","Type":"ContainerStarted","Data":"2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad"} Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.147583 4723 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jjjgh_must-gather-5h5tb_167bd41f-f0a5-4750-84eb-a98390d93d67/copy/0.log" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.148220 4723 generic.go:334] "Generic (PLEG): container finished" podID="167bd41f-f0a5-4750-84eb-a98390d93d67" containerID="bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0" exitCode=143 Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.148283 4723 scope.go:117] "RemoveContainer" containerID="bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.148282 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jjjgh/must-gather-5h5tb" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.170172 4723 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-tqkv6" podStartSLOduration=2.552232176 podStartE2EDuration="3.170157382s" podCreationTimestamp="2025-12-11 15:46:24 +0000 UTC" firstStartedPulling="2025-12-11 15:46:25.499001123 +0000 UTC m=+1396.273234558" lastFinishedPulling="2025-12-11 15:46:26.116926329 +0000 UTC m=+1396.891159764" observedRunningTime="2025-12-11 15:46:27.163703099 +0000 UTC m=+1397.937936534" watchObservedRunningTime="2025-12-11 15:46:27.170157382 +0000 UTC m=+1397.944390817" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.174834 4723 scope.go:117] "RemoveContainer" containerID="fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.246535 4723 scope.go:117] "RemoveContainer" containerID="bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0" Dec 11 15:46:27 crc kubenswrapper[4723]: E1211 15:46:27.246985 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0\": container with ID starting with bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0 not found: ID does not exist" containerID="bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.247035 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0"} err="failed to get container status 
\"bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0\": rpc error: code = NotFound desc = could not find container \"bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0\": container with ID starting with bedc372f6da370aba383f84583e9e092739066cd5fa28567f8e55f42b1d8ada0 not found: ID does not exist" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.247089 4723 scope.go:117] "RemoveContainer" containerID="fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c" Dec 11 15:46:27 crc kubenswrapper[4723]: E1211 15:46:27.247530 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c\": container with ID starting with fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c not found: ID does not exist" containerID="fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.247558 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c"} err="failed to get container status \"fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c\": rpc error: code = NotFound desc = could not find container \"fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c\": container with ID starting with fb62600f6aded1419f64729a6b113a5914ddb89df6359c07c67e16545091e99c not found: ID does not exist" Dec 11 15:46:27 crc kubenswrapper[4723]: I1211 15:46:27.557517 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="167bd41f-f0a5-4750-84eb-a98390d93d67" path="/var/lib/kubelet/pods/167bd41f-f0a5-4750-84eb-a98390d93d67/volumes" Dec 11 15:46:35 crc kubenswrapper[4723]: I1211 15:46:35.276094 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:35 crc kubenswrapper[4723]: I1211 15:46:35.276692 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:35 crc kubenswrapper[4723]: I1211 15:46:35.301324 4723 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:36 crc kubenswrapper[4723]: I1211 15:46:36.235761 4723 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:36 crc kubenswrapper[4723]: I1211 15:46:36.279923 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-tqkv6"] Dec 11 15:46:38 crc kubenswrapper[4723]: I1211 15:46:38.229466 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-tqkv6" podUID="21dd88a9-be78-4e05-93b6-3ac3c1ac9af9" containerName="registry-server" containerID="cri-o://2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad" gracePeriod=2 Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.230003 4723 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.236196 4723 generic.go:334] "Generic (PLEG): container finished" podID="21dd88a9-be78-4e05-93b6-3ac3c1ac9af9" containerID="2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad" exitCode=0 Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.236246 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tqkv6" event={"ID":"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9","Type":"ContainerDied","Data":"2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad"} Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.236253 4723 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-tqkv6" Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.236288 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-tqkv6" event={"ID":"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9","Type":"ContainerDied","Data":"d8b236c59c8e0580b77e5a3f89222d7a5af5030ed7d19c5bda16074cb77055ec"} Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.236311 4723 scope.go:117] "RemoveContainer" containerID="2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad" Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.255135 4723 scope.go:117] "RemoveContainer" containerID="2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad" Dec 11 15:46:39 crc kubenswrapper[4723]: E1211 15:46:39.255543 4723 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad\": container with ID starting with 2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad not found: ID does not exist" containerID="2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad" Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.255596 4723 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad"} err="failed to get container status \"2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad\": rpc error: code = NotFound desc = could not find container \"2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad\": container with ID starting with 2d6b06c94dbb8a4a09368cd7ec070a54ce28c59fc46b7b7e97af5153522dc7ad not found: ID does not exist" Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.319470 4723 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsqzh\" (UniqueName: \"kubernetes.io/projected/21dd88a9-be78-4e05-93b6-3ac3c1ac9af9-kube-api-access-tsqzh\") pod \"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9\" (UID: \"21dd88a9-be78-4e05-93b6-3ac3c1ac9af9\") " Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.325184 4723 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21dd88a9-be78-4e05-93b6-3ac3c1ac9af9-kube-api-access-tsqzh" (OuterVolumeSpecName: "kube-api-access-tsqzh") pod "21dd88a9-be78-4e05-93b6-3ac3c1ac9af9" (UID: "21dd88a9-be78-4e05-93b6-3ac3c1ac9af9"). InnerVolumeSpecName "kube-api-access-tsqzh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.421339 4723 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsqzh\" (UniqueName: \"kubernetes.io/projected/21dd88a9-be78-4e05-93b6-3ac3c1ac9af9-kube-api-access-tsqzh\") on node \"crc\" DevicePath \"\"" Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.580235 4723 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-tqkv6"] Dec 11 15:46:39 crc kubenswrapper[4723]: I1211 15:46:39.585646 4723 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-tqkv6"] Dec 11 15:46:41 crc kubenswrapper[4723]: I1211 15:46:41.555705 4723 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21dd88a9-be78-4e05-93b6-3ac3c1ac9af9" path="/var/lib/kubelet/pods/21dd88a9-be78-4e05-93b6-3ac3c1ac9af9/volumes" Dec 11 15:46:43 crc kubenswrapper[4723]: I1211 15:46:43.745194 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:46:43 crc kubenswrapper[4723]: I1211 15:46:43.745831 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:47:13 crc kubenswrapper[4723]: I1211 15:47:13.745431 4723 patch_prober.go:28] interesting pod/machine-config-daemon-bxzdh container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 15:47:13 crc kubenswrapper[4723]: I1211 15:47:13.746030 4723 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 15:47:13 crc kubenswrapper[4723]: I1211 15:47:13.746085 4723 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" Dec 11 15:47:13 crc kubenswrapper[4723]: I1211 15:47:13.746695 4723 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1cd76c0de71f08b2c0733562cf2b8f05fc5bbc38615d3c43238413dde61b1430"} pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 15:47:13 crc kubenswrapper[4723]: I1211 15:47:13.746749 4723 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" podUID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerName="machine-config-daemon" containerID="cri-o://1cd76c0de71f08b2c0733562cf2b8f05fc5bbc38615d3c43238413dde61b1430" gracePeriod=600 Dec 11 15:47:14 crc kubenswrapper[4723]: I1211 15:47:14.193006 4723 generic.go:334] "Generic 
(PLEG): container finished" podID="e86455ee-3aa9-411e-b46a-ab60dcc77f95" containerID="1cd76c0de71f08b2c0733562cf2b8f05fc5bbc38615d3c43238413dde61b1430" exitCode=0 Dec 11 15:47:14 crc kubenswrapper[4723]: I1211 15:47:14.193062 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerDied","Data":"1cd76c0de71f08b2c0733562cf2b8f05fc5bbc38615d3c43238413dde61b1430"} Dec 11 15:47:14 crc kubenswrapper[4723]: I1211 15:47:14.193353 4723 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-bxzdh" event={"ID":"e86455ee-3aa9-411e-b46a-ab60dcc77f95","Type":"ContainerStarted","Data":"a8433190498c89887e6ed33809fc0f72106f9a0344fcfaf3939f0e3124f36b15"} Dec 11 15:47:14 crc kubenswrapper[4723]: I1211 15:47:14.193375 4723 scope.go:117] "RemoveContainer" containerID="e9435255d18f49a0db14745f8fb7e304867ac082eb3666ed82871694760ba136" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515116563737024463 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015116563740017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015116560425016512 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015116560425015462 5ustar corecore